diff --git a/.gitattributes b/.gitattributes index d7e0ab2f36f5db..13289182400109 100644 --- a/.gitattributes +++ b/.gitattributes @@ -69,7 +69,7 @@ Doc/library/token-list.inc generated Include/internal/pycore_ast.h generated Include/internal/pycore_ast_state.h generated Include/internal/pycore_opcode.h generated -Include/internal/pycore_runtime_init_generated.h generated +Include/internal/pycore_*_generated.h generated Include/opcode.h generated Include/token.h generated Lib/keyword.py generated @@ -82,6 +82,7 @@ Parser/parser.c generated Parser/token.c generated Programs/test_frozenmain.h generated Python/Python-ast.c generated +Python/generated_cases.c.h generated Python/opcode_targets.h generated Python/stdlib_module_names.h generated Tools/peg_generator/pegen/grammar_parser.py generated diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 74081b2ef2e8af..5d30c0928e5ab8 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -8,7 +8,7 @@ .github/** @ezio-melotti # asyncio -**/*asyncio* @1st1 @asvetlov @gvanrossum +**/*asyncio* @1st1 @asvetlov @gvanrossum @kumaraditya303 # Core **/*context* @1st1 @@ -63,7 +63,7 @@ Python/traceback.c @iritkatriel # bytecode. **/*import*.c @brettcannon @encukou @ericsnowcurrently @ncoghlan @warsaw **/*import*.py @brettcannon @encukou @ericsnowcurrently @ncoghlan @warsaw -**/*importlib/resources/* @jaraco @warsaw @brettcannon +**/*importlib/resources/* @jaraco @warsaw @brettcannon @FFY00 **/importlib/metadata/* @jaraco @warsaw # Dates and times @@ -137,8 +137,6 @@ Lib/ast.py @isidentical **/*typing* @gvanrossum @Fidget-Spinner @JelleZijlstra @AlexWaygood -**/*asyncore @giampaolo -**/*asynchat @giampaolo **/*ftplib @giampaolo **/*shutil @giampaolo @@ -148,9 +146,14 @@ Lib/ast.py @isidentical **/*tomllib* @encukou +**/*sysconfig* @FFY00 + # macOS /Mac/ @python/macos-team **/*osx_support* @python/macos-team # pathlib **/*pathlib* @brettcannon + +# zipfile.Path +**/*zipfile/*_path.py @jaraco diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f0544c0962e123..a1bdfa0681e00f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -176,7 +176,7 @@ jobs: needs: check_source if: needs.check_source.outputs.run_tests == 'true' env: - OPENSSL_VER: 1.1.1q + OPENSSL_VER: 1.1.1s PYTHONSTRICTEXTENSIONBUILD: 1 steps: - uses: actions/checkout@v3 @@ -235,7 +235,7 @@ jobs: strategy: fail-fast: false matrix: - openssl_ver: [1.1.1q, 3.0.5] + openssl_ver: [1.1.1s, 3.0.7] env: OPENSSL_VER: ${{ matrix.openssl_ver }} MULTISSL_DIR: ${{ github.workspace }}/multissl @@ -282,7 +282,7 @@ jobs: needs: check_source if: needs.check_source.outputs.run_tests == 'true' env: - OPENSSL_VER: 1.1.1q + OPENSSL_VER: 1.1.1s PYTHONSTRICTEXTENSIONBUILD: 1 ASAN_OPTIONS: detect_leaks=0:allocator_may_return_null=1:handle_segv=0 steps: diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index 10e4cf074a590a..44a1f206df1eb9 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -50,9 +50,14 @@ jobs: run: make -C Doc/ venv - name: 'Check documentation' run: make -C Doc/ check + - name: 'Upload NEWS' + uses: actions/upload-artifact@v3 + with: + name: NEWS + path: Doc/build/NEWS - name: 'Build HTML documentation' run: make -C Doc/ SPHINXOPTS="-q" SPHINXERRORHANDLING="-W --keep-going" html - - name: 'Upload' + - name: 'Upload docs' uses: actions/upload-artifact@v3 with: name: doc-html diff --git a/.gitignore b/.gitignore index 6934faa91e9874..d9c4a7972f076d 100644 --- a/.gitignore +++ b/.gitignore @@ -41,7 +41,6 @@ 
gmon.out .DS_Store *.exe -!Lib/distutils/command/*.exe # Ignore core dumps... but not Tools/msi/core/ or the like. core @@ -58,7 +57,6 @@ Doc/.venv/ Doc/env/ Doc/.env/ Include/pydtrace_probes.h -Lib/distutils/command/*.pdb Lib/lib2to3/*.pickle Lib/site-packages/* !Lib/site-packages/README.txt @@ -116,6 +114,7 @@ PCbuild/win32/ Tools/unicode/data/ /autom4te.cache /build/ +/builddir/ /config.cache /config.log /config.status diff --git a/Doc/Makefile b/Doc/Makefile index b09a9d754fb5aa..3d484ac3ae7937 100644 --- a/Doc/Makefile +++ b/Doc/Makefile @@ -21,10 +21,7 @@ PAPEROPT_letter = -D latex_elements.papersize=letterpaper ALLSPHINXOPTS = -b $(BUILDER) -d build/doctrees $(PAPEROPT_$(PAPER)) -j auto \ $(SPHINXOPTS) $(SPHINXERRORHANDLING) . build/$(BUILDER) $(SOURCES) -.PHONY: help build html htmlhelp latex text texinfo epub changes linkcheck \ - coverage doctest pydoc-topics htmlview clean clean-venv venv dist check serve \ - autobuild-dev autobuild-dev-html autobuild-stable autobuild-stable-html - +.PHONY: help help: @echo "Please use \`make ' where is one of" @echo " clean to remove build files" @@ -44,6 +41,7 @@ help: @echo " dist to create a \"dist\" directory with archived docs for download" @echo " check to run a check for frequent markup errors" +.PHONY: build build: -mkdir -p build # Look first for a Misc/NEWS file (building from a source release tarball @@ -70,38 +68,46 @@ build: $(SPHINXBUILD) $(ALLSPHINXOPTS) @echo +.PHONY: html html: BUILDER = html html: build @echo "Build finished. The HTML pages are in build/html." +.PHONY: htmlhelp htmlhelp: BUILDER = htmlhelp htmlhelp: build @echo "Build finished; now you can run HTML Help Workshop with the" \ "build/htmlhelp/pydoc.hhp project file." +.PHONY: latex latex: BUILDER = latex latex: build @echo "Build finished; the LaTeX files are in build/latex." @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ "run these through (pdf)latex." +.PHONY: text text: BUILDER = text text: build @echo "Build finished; the text files are in build/text." +.PHONY: texinfo texinfo: BUILDER = texinfo texinfo: build @echo "Build finished; the python.texi file is in build/texinfo." @echo "Run \`make info' in that directory to run it through makeinfo." +.PHONY: epub epub: BUILDER = epub epub: build @echo "Build finished; the epub files are in build/epub." +.PHONY: changes changes: BUILDER = changes changes: build @echo "The overview file is in build/changes." 
+.PHONY: linkcheck linkcheck: BUILDER = linkcheck linkcheck: @$(MAKE) build BUILDER=$(BUILDER) || { \ @@ -109,10 +115,12 @@ linkcheck: "or in build/$(BUILDER)/output.txt"; \ false; } +.PHONY: coverage coverage: BUILDER = coverage coverage: build @echo "Coverage finished; see c.txt and python.txt in build/coverage" +.PHONY: doctest doctest: BUILDER = doctest doctest: @$(MAKE) build BUILDER=$(BUILDER) || { \ @@ -120,20 +128,25 @@ doctest: "results in build/doctest/output.txt"; \ false; } +.PHONY: pydoc-topics pydoc-topics: BUILDER = pydoc-topics pydoc-topics: build @echo "Building finished; now run this:" \ "cp build/pydoc-topics/topics.py ../Lib/pydoc_data/topics.py" +.PHONY: htmlview htmlview: html $(PYTHON) -c "import os, webbrowser; webbrowser.open('file://' + os.path.realpath('build/html/index.html'))" +.PHONY: clean clean: clean-venv -rm -rf build/* +.PHONY: clean-venv clean-venv: rm -rf $(VENVDIR) +.PHONY: venv venv: @if [ -d $(VENVDIR) ] ; then \ echo "venv already exists."; \ @@ -145,6 +158,7 @@ venv: echo "The venv has been created in the $(VENVDIR) directory"; \ fi +.PHONY: dist dist: rm -rf dist mkdir -p dist @@ -199,12 +213,14 @@ dist: rm -r dist/python-$(DISTVERSION)-docs-texinfo rm dist/python-$(DISTVERSION)-docs-texinfo.tar +.PHONY: check check: # Check the docs and NEWS files with sphinx-lint. # Ignore the tools and venv dirs and check that the default role is not used. $(SPHINXLINT) -i tools -i $(VENVDIR) --enable default-role $(SPHINXLINT) --enable default-role ../Misc/NEWS.d/next/ +.PHONY: serve serve: @echo "The serve target was removed, use htmlview instead (see bpo-36329)" @@ -216,15 +232,18 @@ serve: # output files) # for development releases: always build +.PHONY: autobuild-dev autobuild-dev: make dist SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1' # for quick rebuilds (HTML only) +.PHONY: autobuild-dev-html autobuild-dev-html: make html SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1' # for stable releases: only build if not in pre-release stage (alpha, beta) # release candidate downloads are okay, since the stable tree can be in that stage +.PHONY: autobuild-stable autobuild-stable: @case $(DISTVERSION) in *[ab]*) \ echo "Not building; $(DISTVERSION) is not a release version."; \ @@ -232,6 +251,7 @@ autobuild-stable: esac @make autobuild-dev +.PHONY: autobuild-stable-html autobuild-stable-html: @case $(DISTVERSION) in *[ab]*) \ echo "Not building; $(DISTVERSION) is not a release version."; \ diff --git a/Doc/_static/og-image.png b/Doc/_static/og-image.png new file mode 100644 index 00000000000000..0e80751e740387 Binary files /dev/null and b/Doc/_static/og-image.png differ diff --git a/Doc/c-api/call.rst b/Doc/c-api/call.rst index 6fb2e15196103d..4dc66e318cd12e 100644 --- a/Doc/c-api/call.rst +++ b/Doc/c-api/call.rst @@ -93,7 +93,7 @@ This is a pointer to a function with the following signature: and they must be unique. If there are no keyword arguments, then *kwnames* can instead be *NULL*. -.. c:macro:: PY_VECTORCALL_ARGUMENTS_OFFSET +.. data:: PY_VECTORCALL_ARGUMENTS_OFFSET If this flag is set in a vectorcall *nargsf* argument, the callee is allowed to temporarily change ``args[-1]``. In other words, *args* points to diff --git a/Doc/c-api/code.rst b/Doc/c-api/code.rst index 9054e7ee3181a5..a6eb86f1a0b514 100644 --- a/Doc/c-api/code.rst +++ b/Doc/c-api/code.rst @@ -115,3 +115,51 @@ bound into a function. the free variables. On error, ``NULL`` is returned and an exception is raised. .. versionadded:: 3.11 + +.. 
c:function:: int PyCode_AddWatcher(PyCode_WatchCallback callback) + + Register *callback* as a code object watcher for the current interpreter. + Return an ID which may be passed to :c:func:`PyCode_ClearWatcher`. + In case of error (e.g. no more watcher IDs available), + return ``-1`` and set an exception. + + .. versionadded:: 3.12 + +.. c:function:: int PyCode_ClearWatcher(int watcher_id) + + Clear watcher identified by *watcher_id* previously returned from + :c:func:`PyCode_AddWatcher` for the current interpreter. + Return ``0`` on success, or ``-1`` and set an exception on error + (e.g. if the given *watcher_id* was never registered.) + + .. versionadded:: 3.12 + +.. c:type:: PyCodeEvent + + Enumeration of possible code object watcher events: + - ``PY_CODE_EVENT_CREATE`` + - ``PY_CODE_EVENT_DESTROY`` + + .. versionadded:: 3.12 + +.. c:type:: int (*PyCode_WatchCallback)(PyCodeEvent event, PyCodeObject* co) + + Type of a code object watcher callback function. + + If *event* is ``PY_CODE_EVENT_CREATE``, then the callback is invoked + after `co` has been fully initialized. Otherwise, the callback is invoked + before the destruction of *co* takes place, so the prior state of *co* + can be inspected. + + Users of this API should not rely on internal runtime implementation + details. Such details may include, but are not limited to, the exact + order and timing of creation and destruction of code objects. While + changes in these details may result in differences observable by watchers + (including whether a callback is invoked or not), it does not change + the semantics of the Python code being executed. + + If the callback returns with an exception set, it must return ``-1``; this + exception will be printed as an unraisable exception using + :c:func:`PyErr_WriteUnraisable`. Otherwise it should return ``0``. + + .. versionadded:: 3.12 diff --git a/Doc/c-api/frame.rst b/Doc/c-api/frame.rst index 46ce700abf1474..1ac8f03d6e48f8 100644 --- a/Doc/c-api/frame.rst +++ b/Doc/c-api/frame.rst @@ -19,6 +19,24 @@ can be used to get a frame object. See also :ref:`Reflection `. +.. c:var:: PyTypeObject PyFrame_Type + + The type of frame objects. + It is the same object as :py:class:`types.FrameType` in the Python layer. + + .. versionchanged:: 3.11 + + Previously, this type was only available after including + ````. + +.. c:function:: int PyFrame_Check(PyObject *obj) + + Return non-zero if *obj* is a frame object. + + .. versionchanged:: 3.11 + + Previously, this function was only available after including + ````. .. c:function:: PyFrameObject* PyFrame_GetBack(PyFrameObject *frame) @@ -79,6 +97,27 @@ See also :ref:`Reflection `. .. versionadded:: 3.11 +.. c:function:: PyObject* PyFrame_GetVar(PyFrameObject *frame, PyObject *name) + + Get the variable *name* of *frame*. + + * Return a :term:`strong reference` to the variable value on success. + * Raise :exc:`NameError` and return ``NULL`` if the variable does not exist. + * Raise an exception and return ``NULL`` on error. + + *name* type must be a :class:`str`. + + .. versionadded:: 3.12 + + +.. c:function:: PyObject* PyFrame_GetVarString(PyFrameObject *frame, const char *name) + + Similar to :c:func:`PyFrame_GetVar`, but the variable name is a C string + encoded in UTF-8. + + .. versionadded:: 3.12 + + .. c:function:: PyObject* PyFrame_GetLocals(PyFrameObject *frame) Get the *frame*'s ``f_locals`` attribute (:class:`dict`). 
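The code object watcher API documented in the ``Doc/c-api/code.rst`` hunk of this patch can be exercised from an extension module roughly as follows. This is only an illustrative sketch based on the signatures shown above; the names ``report_code_event``, ``install_code_watcher`` and ``remove_code_watcher`` are invented for the example::

   #include <Python.h>

   /* Callback matching the documented PyCode_WatchCallback signature:
      invoked when code objects are created and destroyed. */
   static int
   report_code_event(PyCodeEvent event, PyCodeObject *co)
   {
       if (event == PY_CODE_EVENT_CREATE) {
           /* co is fully initialized at this point. */
       }
       else {
           /* PY_CODE_EVENT_DESTROY: co is about to be destroyed,
              so its current state can still be inspected. */
       }
       return 0;   /* on failure: set an exception and return -1 */
   }

   static int code_watcher_id = -1;

   /* Register the watcher for the current interpreter,
      e.g. from a module initialization function. */
   static int
   install_code_watcher(void)
   {
       code_watcher_id = PyCode_AddWatcher(report_code_event);
       return code_watcher_id < 0 ? -1 : 0;
   }

   /* Unregister it again, e.g. at module teardown. */
   static void
   remove_code_watcher(void)
   {
       if (code_watcher_id >= 0) {
           (void)PyCode_ClearWatcher(code_watcher_id);
           code_watcher_id = -1;
       }
   }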
diff --git a/Doc/c-api/function.rst b/Doc/c-api/function.rst index df88e85e518829..3cce18bdde3057 100644 --- a/Doc/c-api/function.rst +++ b/Doc/c-api/function.rst @@ -118,3 +118,63 @@ There are a few functions specific to Python functions. must be a dictionary or ``Py_None``. Raises :exc:`SystemError` and returns ``-1`` on failure. + + +.. c:function:: int PyFunction_AddWatcher(PyFunction_WatchCallback callback) + + Register *callback* as a function watcher for the current interpreter. + Return an ID which may be passed to :c:func:`PyFunction_ClearWatcher`. + In case of error (e.g. no more watcher IDs available), + return ``-1`` and set an exception. + + .. versionadded:: 3.12 + + +.. c:function:: int PyFunction_ClearWatcher(int watcher_id) + + Clear watcher identified by *watcher_id* previously returned from + :c:func:`PyFunction_AddWatcher` for the current interpreter. + Return ``0`` on success, or ``-1`` and set an exception on error + (e.g. if the given *watcher_id* was never registered). + + .. versionadded:: 3.12 + + +.. c:type:: PyFunction_WatchEvent + + Enumeration of possible function watcher events: + - ``PyFunction_EVENT_CREATE`` + - ``PyFunction_EVENT_DESTROY`` + - ``PyFunction_EVENT_MODIFY_CODE`` + - ``PyFunction_EVENT_MODIFY_DEFAULTS`` + - ``PyFunction_EVENT_MODIFY_KWDEFAULTS`` + + .. versionadded:: 3.12 + + +.. c:type:: int (*PyFunction_WatchCallback)(PyFunction_WatchEvent event, PyFunctionObject *func, PyObject *new_value) + + Type of a function watcher callback function. + + If *event* is ``PyFunction_EVENT_CREATE`` or ``PyFunction_EVENT_DESTROY``, + then *new_value* will be ``NULL``. Otherwise, *new_value* will hold a + :term:`borrowed reference` to the new value that is about to be stored in + *func* for the attribute that is being modified. + + The callback may inspect but must not modify *func*; doing so could have + unpredictable effects, including infinite recursion. + + If *event* is ``PyFunction_EVENT_CREATE``, then the callback is invoked + after *func* has been fully initialized. Otherwise, the callback is invoked + before the modification to *func* takes place, so the prior state of *func* + can be inspected. The runtime is permitted to optimize away the creation of + function objects when possible. In such cases no event will be emitted. + Although this creates the possibility of an observable difference of + runtime behavior depending on optimization decisions, it does not change + the semantics of the Python code being executed. + + If the callback returns with an exception set, it must return ``-1``; this + exception will be printed as an unraisable exception using + :c:func:`PyErr_WriteUnraisable`. Otherwise it should return ``0``. + + .. versionadded:: 3.12 diff --git a/Doc/c-api/init_config.rst b/Doc/c-api/init_config.rst index 64bdfefd6494ff..c3346b0421ddef 100644 --- a/Doc/c-api/init_config.rst +++ b/Doc/c-api/init_config.rst @@ -254,7 +254,7 @@ PyPreConfig .. c:member:: int configure_locale - Set the LC_CTYPE locale to the user preferred locale? + Set the LC_CTYPE locale to the user preferred locale. If equals to ``0``, set :c:member:`~PyPreConfig.coerce_c_locale` and :c:member:`~PyPreConfig.coerce_c_locale_warn` members to ``0``. diff --git a/Doc/c-api/memory.rst b/Doc/c-api/memory.rst index f726cd48663b1c..7041c15d23fb83 100644 --- a/Doc/c-api/memory.rst +++ b/Doc/c-api/memory.rst @@ -95,6 +95,8 @@ for the I/O buffer escapes completely the Python memory manager. Allocator Domains ================= +..
_allocator-domains: + All allocating functions belong to one of three different "domains" (see also :c:type:`PyMemAllocatorDomain`). These domains represent different allocation strategies and are optimized for different purposes. The specific details on @@ -479,6 +481,25 @@ Customize Memory Allocators See also :c:member:`PyPreConfig.allocator` and :ref:`Preinitialize Python with PyPreConfig `. + .. warning:: + + :c:func:`PyMem_SetAllocator` does have the following contract: + + * It can be called after :c:func:`Py_PreInitialize` and before + :c:func:`Py_InitializeFromConfig` to install a custom memory + allocator. There are no restrictions over the installed allocator + other than the ones imposed by the domain (for instance, the Raw + Domain allows the allocator to be called without the GIL held). See + :ref:`the section on allocator domains <allocator-domains>` for more + information. + + * If called after Python has finished initializing (after + :c:func:`Py_InitializeFromConfig` has been called), the allocator + **must** wrap the existing allocator. Substituting the current + allocator for some other arbitrary one is **not supported**. + + + .. c:function:: void PyMem_SetupDebugHooks(void) Setup :ref:`debug hooks in the Python memory allocators ` diff --git a/Doc/c-api/refcounting.rst b/Doc/c-api/refcounting.rst index cd1f2ef7076836..d8e9c2da6f3ff3 100644 --- a/Doc/c-api/refcounting.rst +++ b/Doc/c-api/refcounting.rst @@ -7,8 +7,8 @@ Reference Counting ****************** -The macros in this section are used for managing reference counts of Python -objects. +The functions and macros in this section are used for managing reference counts +of Python objects. .. c:function:: Py_ssize_t Py_REFCNT(PyObject *o) @@ -129,6 +129,11 @@ objects. It is a good idea to use this macro whenever decrementing the reference count of an object that might be traversed during garbage collection. + .. versionchanged:: 3.12 + The macro argument is now only evaluated once. If the argument has side + effects, these are no longer duplicated. + + .. c:function:: void Py_IncRef(PyObject *o) Increment the reference count for object *o*. A function version of :c:func:`Py_XINCREF`. @@ -139,3 +144,40 @@ objects. Decrement the reference count for object *o*. A function version of :c:func:`Py_XDECREF`. It can be used for runtime dynamic embedding of Python. + + +.. c:macro:: Py_SETREF(dst, src) + + Macro safely decrementing the *dst* reference count and setting *dst* to + *src*. + + As in the case of :c:func:`Py_CLEAR`, "the obvious" code can be deadly:: + + Py_DECREF(dst); + dst = src; + + The safe way is:: + + Py_SETREF(dst, src); + + That arranges to set *dst* to *src* *before* decrementing the reference count of + the old value of *dst*, so that any code triggered as a side-effect of that + old value getting torn down no longer believes *dst* points to a valid object. + + .. versionadded:: 3.6 + + .. versionchanged:: 3.12 + The macro arguments are now only evaluated once. If an argument has side + effects, these are no longer duplicated. + + +.. c:macro:: Py_XSETREF(dst, src) + + Variant of the :c:macro:`Py_SETREF` macro that uses :c:func:`Py_XDECREF` instead + of :c:func:`Py_DECREF`. + + .. versionadded:: 3.6 + + .. versionchanged:: 3.12 + The macro arguments are now only evaluated once. If an argument has side + effects, these are no longer duplicated.
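To make the ``Py_SETREF``/``Py_XSETREF`` hunk above concrete, here is a minimal sketch of the pattern it describes, assuming a hypothetical extension type ``MyObject`` with a single ``PyObject *attr`` field (the type and the setter name are invented for the example)::

   #include <Python.h>

   typedef struct {
       PyObject_HEAD
       PyObject *attr;   /* strong reference, may be NULL */
   } MyObject;

   /* Setter suitable for a PyGetSetDef entry.  *value* may be NULL when
      the attribute is deleted; Py_XSETREF and Py_XNewRef handle both cases. */
   static int
   MyObject_set_attr(PyObject *op, PyObject *value, void *closure)
   {
       MyObject *self = (MyObject *)op;
       /* The "obvious" spelling
              Py_XDECREF(self->attr);
              self->attr = Py_XNewRef(value);
          is unsafe: destroying the old value can run arbitrary code that
          still sees self->attr pointing at a dead object. */
       Py_XSETREF(self->attr, Py_XNewRef(value));
       return 0;
   }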
diff --git a/Doc/c-api/structures.rst b/Doc/c-api/structures.rst index 183ac144c50dd3..827d624fc99edb 100644 --- a/Doc/c-api/structures.rst +++ b/Doc/c-api/structures.rst @@ -385,86 +385,67 @@ Accessing attributes of extension types .. c:type:: PyMemberDef Structure which describes an attribute of a type which corresponds to a C - struct member. Its fields are: + struct member. Its fields are, in order: - .. c:member:: const char* PyMemberDef.name + .. c:member:: const char* name - Name of the member + Name of the member. + A NULL value marks the end of a ``PyMemberDef[]`` array. - .. c:member:: int PyMemberDef.type - - The type of the member in the C struct. + The string should be static, no copy is made of it. .. c:member:: Py_ssize_t PyMemberDef.offset The offset in bytes that the member is located on the type’s object struct. - .. c:member:: int PyMemberDef.flags - - Flag bits indicating if the field should be read-only or writable. - - .. c:member:: const char* PyMemberDef.doc - - Points to the contents of the docstring. - - :c:member:`PyMemberDef.type` can be one of many ``T_`` macros corresponding to various C - types. When the member is accessed in Python, it will be converted to the - equivalent Python type. - - =============== ================== - Macro name C type - =============== ================== - T_SHORT short - T_INT int - T_LONG long - T_FLOAT float - T_DOUBLE double - T_STRING const char \* - T_OBJECT PyObject \* - T_OBJECT_EX PyObject \* - T_CHAR char - T_BYTE char - T_UBYTE unsigned char - T_UINT unsigned int - T_USHORT unsigned short - T_ULONG unsigned long - T_BOOL char - T_LONGLONG long long - T_ULONGLONG unsigned long long - T_PYSSIZET Py_ssize_t - =============== ================== - - :c:macro:`T_OBJECT` and :c:macro:`T_OBJECT_EX` differ in that - :c:macro:`T_OBJECT` returns ``None`` if the member is ``NULL`` and - :c:macro:`T_OBJECT_EX` raises an :exc:`AttributeError`. Try to use - :c:macro:`T_OBJECT_EX` over :c:macro:`T_OBJECT` because :c:macro:`T_OBJECT_EX` - handles use of the :keyword:`del` statement on that attribute more correctly - than :c:macro:`T_OBJECT`. - - :c:member:`PyMemberDef.flags` can be ``0`` for write and read access or :c:macro:`READONLY` for - read-only access. Using :c:macro:`T_STRING` for :attr:`type` implies - :c:macro:`READONLY`. :c:macro:`T_STRING` data is interpreted as UTF-8. - Only :c:macro:`T_OBJECT` and :c:macro:`T_OBJECT_EX` - members can be deleted. (They are set to ``NULL``). + .. c:member:: int type + + The type of the member in the C struct. + See :ref:`PyMemberDef-types` for the possible values. + + .. c:member:: int flags + + Zero or more of the :ref:`PyMemberDef-flags`, combined using bitwise OR. + + .. c:member:: const char* doc + + The docstring, or NULL. + The string should be static, no copy is made of it. + Typically, it is defined using :c:macro:`PyDoc_STR`. + + By default (when :c:member:`flags` is ``0``), members allow + both read and write access. + Use the :c:macro:`Py_READONLY` flag for read-only access. + Certain types, like :c:macro:`Py_T_STRING`, imply :c:macro:`Py_READONLY`. + Only :c:macro:`Py_T_OBJECT_EX` (and legacy :c:macro:`T_OBJECT`) members can + be deleted. .. 
_pymemberdef-offsets: - Heap allocated types (created using :c:func:`PyType_FromSpec` or similar), - ``PyMemberDef`` may contain definitions for the special member - ``__vectorcalloffset__``, corresponding to + For heap-allocated types (created using :c:func:`PyType_FromSpec` or similar), + ``PyMemberDef`` may contain a definition for the special member + ``"__vectorcalloffset__"``, corresponding to :c:member:`~PyTypeObject.tp_vectorcall_offset` in type objects. - These must be defined with ``T_PYSSIZET`` and ``READONLY``, for example:: + These must be defined with ``Py_T_PYSSIZET`` and ``Py_READONLY``, for example:: static PyMemberDef spam_type_members[] = { - {"__vectorcalloffset__", T_PYSSIZET, offsetof(Spam_object, vectorcall), READONLY}, + {"__vectorcalloffset__", Py_T_PYSSIZET, + offsetof(Spam_object, vectorcall), Py_READONLY}, {NULL} /* Sentinel */ }; + (You may need to ``#include `` for :c:func:`!offsetof`.) + The legacy offsets :c:member:`~PyTypeObject.tp_dictoffset` and - :c:member:`~PyTypeObject.tp_weaklistoffset` are still supported, but extensions are - strongly encouraged to use ``Py_TPFLAGS_MANAGED_DICT`` and - ``Py_TPFLAGS_MANAGED_WEAKREF`` instead. + :c:member:`~PyTypeObject.tp_weaklistoffset` can be defined similarly using + ``"__dictoffset__"`` and ``"__weaklistoffset__"`` members, but extensions + are strongly encouraged to use :const:`Py_TPFLAGS_MANAGED_DICT` and + :const:`Py_TPFLAGS_MANAGED_WEAKREF` instead. + + .. versionchanged:: 3.12 + ``PyMemberDef`` is always available. + Previously, it required including ``"structmember.h"``. .. c:function:: PyObject* PyMember_GetOne(const char *obj_addr, struct PyMemberDef *m) @@ -472,6 +453,10 @@ Accessing attributes of extension types attribute is described by ``PyMemberDef`` *m*. Returns ``NULL`` on error. + .. versionchanged:: 3.12 + + ``PyMember_GetOne`` is always available. + Previously, it required including ``"structmember.h"``. .. c:function:: int PyMember_SetOne(char *obj_addr, struct PyMemberDef *m, PyObject *o) @@ -479,29 +464,169 @@ Accessing attributes of extension types The attribute to set is described by ``PyMemberDef`` *m*. Returns ``0`` if successful and a negative value on failure. + .. versionchanged:: 3.12 + + ``PyMember_SetOne`` is always available. + Previously, it required including ``"structmember.h"``. + +.. _PyMemberDef-flags: + +Member flags +^^^^^^^^^^^^ + +The following flags can be used with :c:member:`PyMemberDef.flags`: + +.. c:macro:: Py_READONLY + + Not writable. + +.. c:macro:: Py_AUDIT_READ + + Emit an ``object.__getattr__`` :ref:`audit event ` + before reading. + +.. index:: + single: READ_RESTRICTED + single: WRITE_RESTRICTED + single: RESTRICTED + +.. versionchanged:: 3.10 + + The :const:`!RESTRICTED`, :const:`!READ_RESTRICTED` and + :const:`!WRITE_RESTRICTED` macros available with + ``#include "structmember.h"`` are deprecated. + :const:`!READ_RESTRICTED` and :const:`!RESTRICTED` are equivalent to + :const:`Py_AUDIT_READ`; :const:`!WRITE_RESTRICTED` does nothing. + +.. index:: + single: READONLY + +.. versionchanged:: 3.12 + + The :const:`!READONLY` macro was renamed to :const:`Py_READONLY`. + The :const:`!PY_AUDIT_READ` macro was renamed with the ``Py_`` prefix. + The new names are now always available. + Previously, these required ``#include "structmember.h"``. + The header is still available and it provides the old names. + +.. _PyMemberDef-types: + +Member types +^^^^^^^^^^^^ + +:c:member:`PyMemberDef.type` can be one of the following macros corresponding +to various C types. 
+When the member is accessed in Python, it will be converted to the +equivalent Python type. +When it is set from Python, it will be converted back to the C type. +If that is not possible, an exception such as :exc:`TypeError` or +:exc:`ValueError` is raised. + +Unless marked (D), attributes defined this way cannot be deleted +using e.g. :keyword:`del` or :py:func:`delattr`. + +================================ ============================= ====================== +Macro name C type Python type +================================ ============================= ====================== +.. c:macro:: Py_T_BYTE :c:expr:`char` :py:class:`int` +.. c:macro:: Py_T_SHORT :c:expr:`short` :py:class:`int` +.. c:macro:: Py_T_INT :c:expr:`int` :py:class:`int` +.. c:macro:: Py_T_LONG :c:expr:`long` :py:class:`int` +.. c:macro:: Py_T_LONGLONG :c:expr:`long long` :py:class:`int` +.. c:macro:: Py_T_UBYTE :c:expr:`unsigned char` :py:class:`int` +.. c:macro:: Py_T_UINT :c:expr:`unsigned int` :py:class:`int` +.. c:macro:: Py_T_USHORT :c:expr:`unsigned short` :py:class:`int` +.. c:macro:: Py_T_ULONG :c:expr:`unsigned long` :py:class:`int` +.. c:macro:: Py_T_ULONGLONG :c:expr:`unsigned long long` :py:class:`int` +.. c:macro:: Py_T_PYSSIZET :c:expr:`Py_ssize_t` :py:class:`int` +.. c:macro:: Py_T_FLOAT :c:expr:`float` :py:class:`float` +.. c:macro:: Py_T_DOUBLE :c:expr:`double` :py:class:`float` +.. c:macro:: Py_T_BOOL :c:expr:`char` :py:class:`bool` + (written as 0 or 1) +.. c:macro:: Py_T_STRING :c:expr:`const char *` (*) :py:class:`str` (RO) +.. c:macro:: Py_T_STRING_INPLACE :c:expr:`const char[]` (*) :py:class:`str` (RO) +.. c:macro:: Py_T_CHAR :c:expr:`char` (0-127) :py:class:`str` (**) +.. c:macro:: Py_T_OBJECT_EX :c:expr:`PyObject *` :py:class:`object` (D) +================================ ============================= ====================== + + (*): Zero-terminated, UTF8-encoded C string. + With :c:macro:`!Py_T_STRING` the C representation is a pointer; + with :c:macro:`!Py_T_STRING_INPLACE` the string is stored directly + in the structure. + + (**): String of length 1. Only ASCII is accepted. + + (RO): Implies :c:macro:`Py_READONLY`. + + (D): Can be deleted, in which case the pointer is set to ``NULL``. + Reading a ``NULL`` pointer raises :py:exc:`AttributeError`. + +.. index:: + single: T_BYTE + single: T_SHORT + single: T_INT + single: T_LONG + single: T_LONGLONG + single: T_UBYTE + single: T_USHORT + single: T_UINT + single: T_ULONG + single: T_ULONGLONG + single: T_PYSSIZET + single: T_FLOAT + single: T_DOUBLE + single: T_BOOL + single: T_CHAR + single: T_STRING + single: T_STRING_INPLACE + single: T_OBJECT_EX + single: structmember.h + +.. versionadded:: 3.12 + + In previous versions, the macros were only available with + ``#include "structmember.h"`` and were named without the ``Py_`` prefix + (e.g. as ``T_INT``). + The header is still available and contains the old names, along with + the following deprecated types: + + .. c:macro:: T_OBJECT + + Like ``Py_T_OBJECT_EX``, but ``NULL`` is converted to ``None``. + This results in surprising behavior in Python: deleting the attribute + effectively sets it to ``None``. + + .. c:macro:: T_NONE + + Always ``None``. Must be used with :c:macro:`Py_READONLY`. + +Defining Getters and Setters +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. c:type:: PyGetSetDef Structure to define property-like access for a type. See also description of the :c:member:`PyTypeObject.tp_getset` slot.
- +-------------+------------------+-----------------------------------+ - | Field | C Type | Meaning | - +=============+==================+===================================+ - | name | const char \* | attribute name | - +-------------+------------------+-----------------------------------+ - | get | getter | C function to get the attribute | - +-------------+------------------+-----------------------------------+ - | set | setter | optional C function to set or | - | | | delete the attribute, if omitted | - | | | the attribute is readonly | - +-------------+------------------+-----------------------------------+ - | doc | const char \* | optional docstring | - +-------------+------------------+-----------------------------------+ - | closure | void \* | optional function pointer, | - | | | providing additional data for | - | | | getter and setter | - +-------------+------------------+-----------------------------------+ + .. c:member:: const char* PyGetSetDef.name + + Attribute name. + + .. c:member:: getter PyGetSetDef.get + + C function to get the attribute. + + .. c:member:: setter PyGetSetDef.set + + Optional C function to set or delete the attribute; if omitted, the attribute is read-only. + + .. c:member:: const char* PyGetSetDef.doc + + Optional docstring. + + .. c:member:: void* PyGetSetDef.closure + + Optional function pointer, providing additional data for getter and setter. The ``get`` function takes one :c:expr:`PyObject*` parameter (the instance) and a function pointer (the associated ``closure``):: diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst index 8ccdece3efc54c..c7b318b21853e5 100644 --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -147,12 +147,20 @@ Quick Reference +------------------------------------------------+-----------------------------------+-------------------+---+---+---+---+ | :c:member:`~PyTypeObject.tp_vectorcall` | :c:type:`vectorcallfunc` | | | | | | +------------------------------------------------+-----------------------------------+-------------------+---+---+---+---+ + | :c:member:`~PyTypeObject.tp_watched` | char | | | | | | + +------------------------------------------------+-----------------------------------+-------------------+---+---+---+---+ .. [#slots] - A slot name in parentheses indicates it is (effectively) deprecated. - Names in angle brackets should be treated as read-only. - Names in square brackets are for internal use only. - "" (as a prefix) means the field is required (must be non-``NULL``). + + **()**: A slot name in parentheses indicates it is (effectively) deprecated. + + **<>**: Names in angle brackets should be initially set to ``NULL`` and + treated as read-only. + + **[]**: Names in square brackets are for internal use only. + + **** (as a prefix) means the field is required (must be non-``NULL``). + .. [#cols] Columns: **"O"**: set on :c:type:`PyBaseObject_Type` @@ -1245,6 +1253,17 @@ and :c:type:`PyType_Type` effectively act as defaults.) **Inheritance:** This flag is not inherited. + However, subclasses will not be instantiable unless they provide a + non-NULL :c:member:`~PyTypeObject.tp_new` (which is only possible + via the C API). + + .. note:: + + To disallow instantiating a class directly but allow instantiating + its subclasses (e.g. for an :term:`abstract base class`), + do not use this flag. + Instead, make :c:member:`~PyTypeObject.tp_new` only succeed for + subclasses. .. versionadded:: 3.10 @@ -1912,8 +1931,19 @@ and :c:type:`PyType_Type` effectively act as defaults.) Tuple of base types.
- This is set for types created by a class statement. It should be ``NULL`` for - statically defined types. + This field should be set to ``NULL`` and treated as read-only. + Python will fill it in when the type is :c:func:`initialized `. + + For dynamically created classes, the ``Py_tp_bases`` + :c:type:`slot ` can be used instead of the *bases* argument + of :c:func:`PyType_FromSpecWithBases`. + The argument form is preferred. + + .. warning:: + + Multiple inheritance does not work well for statically defined types. + If you set ``tp_bases`` to a tuple, Python will not raise an error, + but some slots will only be inherited from the first base. **Inheritance:** @@ -1925,6 +1955,8 @@ and :c:type:`PyType_Type` effectively act as defaults.) Tuple containing the expanded set of base types, starting with the type itself and ending with :class:`object`, in Method Resolution Order. + This field should be set to ``NULL`` and treated as read-only. + Python will fill it in when the type is :c:func:`initialized `. **Inheritance:** @@ -2060,6 +2092,13 @@ and :c:type:`PyType_Type` effectively act as defaults.) .. versionadded:: 3.9 (the field exists since 3.8 but it's only used since 3.9) +.. c:member:: char PyTypeObject.tp_watched + + Internal. Do not use. + + .. versionadded:: 3.12 + + .. _static-types: Static Types diff --git a/Doc/conf.py b/Doc/conf.py index be1c9fff51a277..b3da8fa9ec4497 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -13,9 +13,25 @@ # General configuration # --------------------- -extensions = ['sphinx.ext.coverage', 'sphinx.ext.doctest', - 'pyspecific', 'c_annotations', 'escape4chm', - 'asdl_highlight', 'peg_highlight', 'glossary_search'] +extensions = [ + 'asdl_highlight', + 'c_annotations', + 'escape4chm', + 'glossary_search', + 'peg_highlight', + 'pyspecific', + 'sphinx.ext.coverage', + 'sphinx.ext.doctest', +] + +# Skip if downstream redistributors haven't installed it +try: + import sphinxext.opengraph +except ImportError: + pass +else: + extensions.append('sphinxext.opengraph') + doctest_global_setup = ''' try: @@ -89,6 +105,14 @@ # Short title used e.g. for HTML tags. html_short_title = '%s Documentation' % release +# Deployment preview information, from Netlify +# (See netlify.toml and https://docs.netlify.com/configure-builds/environment-variables/#git-metadata) +html_context = { + "is_deployment_preview": os.getenv("IS_DEPLOYMENT_PREVIEW"), + "repository_url": os.getenv("REPOSITORY_URL"), + "pr_id": os.getenv("REVIEW_ID") +} + # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. html_last_updated_fmt = '%b %d, %Y' @@ -114,7 +138,7 @@ html_use_opensearch = 'https://docs.python.org/' + version # Additional static files. -html_static_path = ['tools/static'] +html_static_path = ['_static', 'tools/static'] # Output file base name for HTML help builder. 
htmlhelp_basename = 'python' + release.replace('.', '') @@ -197,7 +221,6 @@ coverage_ignore_modules = [ r'[T|t][k|K]', r'Tix', - r'distutils.*', ] coverage_ignore_functions = [ @@ -239,3 +262,13 @@ # Relative filename of the data files refcount_file = 'data/refcounts.dat' stable_abi_file = 'data/stable_abi.dat' + +# sphinxext-opengraph config +ogp_site_url = 'https://docs.python.org/3/' +ogp_site_name = 'Python documentation' +ogp_image = '_static/og-image.png' +ogp_custom_meta_tags = [ + '<meta property="og:image:width" content="200">', + '<meta property="og:image:height" content="200">', + '<meta name="theme-color" content="#3776ab">', +] diff --git a/Doc/data/refcounts.dat b/Doc/data/refcounts.dat index 51ccacf13f9e3b..349c4dd5be3d81 100644 --- a/Doc/data/refcounts.dat +++ b/Doc/data/refcounts.dat @@ -1018,10 +1018,10 @@ PyImport_Import:PyObject*::+1: PyImport_Import:PyObject*:name:0: PyImport_ImportFrozenModule:int::: -PyImport_ImportFrozenModule:const char*::: +PyImport_ImportFrozenModule:const char*:name:: PyImport_ImportFrozenModuleObject:int::: -PyImport_ImportFrozenModuleObject:PyObject*::+1: +PyImport_ImportFrozenModuleObject:PyObject*:name:+1: PyImport_ImportModule:PyObject*::+1: PyImport_ImportModule:const char*:name:: diff --git a/Doc/data/stable_abi.dat b/Doc/data/stable_abi.dat index db8fc15d93d15a..53895bbced8408 100644 --- a/Doc/data/stable_abi.dat +++ b/Doc/data/stable_abi.dat @@ -386,6 +386,8 @@ function,PyMem_Malloc,3.2,, function,PyMem_Realloc,3.2,, type,PyMemberDef,3.2,,full-abi var,PyMemberDescr_Type,3.2,, +function,PyMember_GetOne,3.2,, +function,PyMember_SetOne,3.2,, function,PyMemoryView_FromBuffer,3.11,, function,PyMemoryView_FromMemory,3.7,, function,PyMemoryView_FromObject,3.2,, diff --git a/Doc/extending/newtypes.rst b/Doc/extending/newtypes.rst index 3de849ade78888..80a1387db200c2 100644 --- a/Doc/extending/newtypes.rst +++ b/Doc/extending/newtypes.rst @@ -286,36 +286,11 @@ be read-only or read-write. The structures in the table are defined as:: For each entry in the table, a :term:`descriptor` will be constructed and added to the type which will be able to extract a value from the instance structure. The -:attr:`type` field should contain one of the type codes defined in the -:file:`structmember.h` header; the value will be used to determine how to +:attr:`type` field should contain a type code like :c:macro:`Py_T_INT` or +:c:macro:`Py_T_DOUBLE`; the value will be used to determine how to convert Python values to and from C values. The :attr:`flags` field is used to -store flags which control how the attribute can be accessed. - -The following flag constants are defined in :file:`structmember.h`; they may be -combined using bitwise-OR. - -+---------------------------+----------------------------------------------+ -| Constant | Meaning | -+===========================+==============================================+ -| :const:`READONLY` | Never writable. | -+---------------------------+----------------------------------------------+ -| :const:`PY_AUDIT_READ` | Emit an ``object.__getattr__`` | -| | :ref:`audit events <audit-events>` before | -| | reading. | -+---------------------------+----------------------------------------------+ - -.. versionchanged:: 3.10 - :const:`RESTRICTED`, :const:`READ_RESTRICTED` and :const:`WRITE_RESTRICTED` - are deprecated. However, :const:`READ_RESTRICTED` is an alias for - :const:`PY_AUDIT_READ`, so fields that specify either :const:`RESTRICTED` - or :const:`READ_RESTRICTED` will also raise an audit event. - -.. 
index:: - single: READONLY - single: READ_RESTRICTED - single: WRITE_RESTRICTED - single: RESTRICTED - single: PY_AUDIT_READ +store flags which control how the attribute can be accessed: you can set it to +:c:macro:`Py_READONLY` to prevent Python code from setting it. An interesting advantage of using the :c:member:`~PyTypeObject.tp_members` table to build descriptors that are used at runtime is that any attribute defined this way can diff --git a/Doc/extending/newtypes_tutorial.rst b/Doc/extending/newtypes_tutorial.rst index 5d4a3f06dd5402..54de3fd42437d9 100644 --- a/Doc/extending/newtypes_tutorial.rst +++ b/Doc/extending/newtypes_tutorial.rst @@ -239,13 +239,6 @@ adds these capabilities: This version of the module has a number of changes. -We've added an extra include:: - - #include <structmember.h> - -This include provides declarations that we use to handle attributes, as -described a bit later. - The :class:`Custom` type now has three data attributes in its C struct, *first*, *last*, and *number*. The *first* and *last* variables are Python strings containing first and last names. The *number* attribute is a C integer. @@ -436,11 +429,11 @@ We want to expose our instance variables as attributes. There are a number of ways to do that. The simplest way is to define member definitions:: static PyMemberDef Custom_members[] = { - {"first", T_OBJECT_EX, offsetof(CustomObject, first), 0, + {"first", Py_T_OBJECT_EX, offsetof(CustomObject, first), 0, "first name"}, - {"last", T_OBJECT_EX, offsetof(CustomObject, last), 0, + {"last", Py_T_OBJECT_EX, offsetof(CustomObject, last), 0, "last name"}, - {"number", T_INT, offsetof(CustomObject, number), 0, + {"number", Py_T_INT, offsetof(CustomObject, number), 0, "custom number"}, {NULL} /* Sentinel */ }; @@ -609,7 +602,7 @@ above. In this case, we aren't using a closure, so we just pass ``NULL``. We also remove the member definitions for these attributes:: static PyMemberDef Custom_members[] = { - {"number", T_INT, offsetof(CustomObject, number), 0, + {"number", Py_T_INT, offsetof(CustomObject, number), 0, "custom number"}, {NULL} /* Sentinel */ }; diff --git a/Doc/faq/programming.rst b/Doc/faq/programming.rst index ad281f826dd42c..584d33e9622e33 100644 --- a/Doc/faq/programming.rst +++ b/Doc/faq/programming.rst @@ -25,8 +25,9 @@ Reference Manual <pdb>`. You can also write your own debugger by using the code for pdb as an example. The IDLE interactive development environment, which is part of the standard -Python distribution (normally available as Tools/scripts/idle), includes a -graphical debugger. +Python distribution (normally available as +`Tools/scripts/idle3 <https://github.com/python/cpython/blob/main/Tools/scripts/idle3>`_), +includes a graphical debugger. PythonWin is a Python IDE that includes a GUI debugger based on pdb. The PythonWin debugger colors breakpoints and has quite a few cool features such as @@ -78,7 +79,8 @@ set of modules required by a program and bind these modules together with a Python binary to produce a single executable. One is to use the freeze tool, which is included in the Python source tree as -``Tools/freeze``. It converts Python byte code to C arrays; with a C compiler you can +`Tools/freeze <https://github.com/python/cpython/tree/main/Tools/freeze>`_. +It converts Python byte code to C arrays; with a C compiler you can embed all your modules into a new program, which is then linked with the standard Python modules. 
@@ -114,7 +116,7 @@ Core Language Why am I getting an UnboundLocalError when the variable has a value? -------------------------------------------------------------------- -It can be a surprise to get the UnboundLocalError in previously working +It can be a surprise to get the :exc:`UnboundLocalError` in previously working code when it is modified by adding an assignment statement somewhere in the body of a function. @@ -123,6 +125,7 @@ This code: >>> x = 10 >>> def bar(): ... print(x) + ... >>> bar() 10 @@ -133,7 +136,7 @@ works, but this code: ... print(x) ... x += 1 -results in an UnboundLocalError: +results in an :exc:`!UnboundLocalError`: >>> foo() Traceback (most recent call last): @@ -155,6 +158,7 @@ global: ... global x ... print(x) ... x += 1 + ... >>> foobar() 10 @@ -176,6 +180,7 @@ keyword: ... x += 1 ... bar() ... print(x) + ... >>> foo() 10 11 @@ -273,7 +278,7 @@ main.py:: import mod print(config.x) -Note that using a module is also the basis for implementing the Singleton design +Note that using a module is also the basis for implementing the singleton design pattern, for the same reason. @@ -291,9 +296,9 @@ using multiple imports per line uses less screen space. It's good practice if you import modules in the following order: -1. standard library modules -- e.g. ``sys``, ``os``, ``getopt``, ``re`` +1. standard library modules -- e.g. :mod:`sys`, :mod:`os`, :mod:`argparse`, :mod:`re` 2. third-party library modules (anything installed in Python's site-packages - directory) -- e.g. mx.DateTime, ZODB, PIL.Image, etc. + directory) -- e.g. :mod:`!dateutil`, :mod:`!requests`, :mod:`!PIL.Image` 3. locally developed modules It is sometimes necessary to move imports to a function or class to avoid @@ -471,7 +476,7 @@ object ``x`` refers to). After this assignment we have two objects (the ints Some operations (for example ``y.append(10)`` and ``y.sort()``) mutate the object, whereas superficially similar operations (for example ``y = y + [10]`` -and ``sorted(y)``) create a new object. In general in Python (and in all cases +and :func:`sorted(y) <sorted>`) create a new object. In general in Python (and in all cases in the standard library) a method that mutates an object will return ``None`` to help avoid getting the two types of operations confused. So if you mistakenly write ``y.sort()`` thinking it will give you a sorted copy of ``y``, @@ -644,7 +649,7 @@ Sequences can be copied by slicing:: How can I find the methods or attributes of an object? ------------------------------------------------------ -For an instance x of a user-defined class, ``dir(x)`` returns an alphabetized +For an instance ``x`` of a user-defined class, :func:`dir(x) <dir>` returns an alphabetized list of the names containing the instance attributes and methods and attributes defined by its class. @@ -669,9 +674,9 @@ callable. Consider the following code:: <__main__.A object at 0x16D07CC> Arguably the class has a name: even though it is bound to two names and invoked -through the name B the created instance is still reported as an instance of -class A. However, it is impossible to say whether the instance's name is a or -b, since both names are bound to the same value. +through the name ``B`` the created instance is still reported as an instance of +class ``A``. However, it is impossible to say whether the instance's name is ``a`` or +``b``, since both names are bound to the same value. Generally speaking it should not be necessary for your code to "know the names" of particular values. 
Unless you are deliberately writing introspective @@ -841,7 +846,7 @@ How do I get int literal attribute instead of SyntaxError? ---------------------------------------------------------- Trying to lookup an ``int`` literal attribute in the normal manner gives -a syntax error because the period is seen as a decimal point:: +a :exc:`SyntaxError` because the period is seen as a decimal point:: >>> 1.__class__ File "<stdin>", line 1 @@ -887,7 +892,7 @@ leading '0' in a decimal number (except '0'). How do I convert a number to a string? -------------------------------------- -To convert, e.g., the number 144 to the string '144', use the built-in type +To convert, e.g., the number ``144`` to the string ``'144'``, use the built-in type constructor :func:`str`. If you want a hexadecimal or octal representation, use the built-in functions :func:`hex` or :func:`oct`. For fancy formatting, see the :ref:`f-strings` and :ref:`formatstrings` sections, @@ -1006,11 +1011,11 @@ Not as such. For simple input parsing, the easiest approach is usually to split the line into whitespace-delimited words using the :meth:`~str.split` method of string objects and then convert decimal strings to numeric values using :func:`int` or -:func:`float`. ``split()`` supports an optional "sep" parameter which is useful +:func:`float`. :meth:`!split()` supports an optional "sep" parameter which is useful if the line uses something other than whitespace as a separator. For more complicated input parsing, regular expressions are more powerful -than C's :c:func:`sscanf` and better suited for the task. +than C's ``sscanf`` and better suited for the task. What does 'UnicodeDecodeError' or 'UnicodeEncodeError' error mean? @@ -1206,15 +1211,16 @@ difference is that a Python list can contain objects of many different types. The ``array`` module also provides methods for creating arrays of fixed types with compact representations, but they are slower to index than lists. Also -note that NumPy and other third party packages define array-like structures with +note that `NumPy <https://numpy.org/>`_ +and other third party packages define array-like structures with various characteristics as well. -To get Lisp-style linked lists, you can emulate cons cells using tuples:: +To get Lisp-style linked lists, you can emulate *cons cells* using tuples:: lisp_list = ("like", ("this", ("example", None) ) ) If mutability is desired, you could use lists instead of tuples. Here the -analogue of lisp car is ``lisp_list[0]`` and the analogue of cdr is +analogue of a Lisp *car* is ``lisp_list[0]`` and the analogue of *cdr* is ``lisp_list[1]``. Only do this if you're sure you really need to, because it's usually a lot slower than using Python lists. @@ -1273,13 +1279,25 @@ Or, you can use an extension that provides a matrix datatype; `NumPy <https://numpy.org/>`_ is the best known. -How do I apply a method to a sequence of objects? -------------------------------------------------- +How do I apply a method or function to a sequence of objects? +-------------------------------------------------------------- -Use a list comprehension:: +To call a method or function and accumulate the return values in a list, +a :term:`list comprehension` is an elegant solution:: result = [obj.method() for obj in mylist] + result = [function(obj) for obj in mylist] + +To just run the method or function without saving the return values, +a plain :keyword:`for` loop will suffice:: + + for obj in mylist: + obj.method() + + for obj in mylist: + function(obj) + ..
_faq-augmented-assignment-tuple-error: Why does a_tuple[i] += ['item'] raise an exception when the addition works? @@ -1334,11 +1352,12 @@ that even though there was an error, the append worked:: ['foo', 'item'] To see why this happens, you need to know that (a) if an object implements an -``__iadd__`` magic method, it gets called when the ``+=`` augmented assignment +:meth:`~object.__iadd__` magic method, it gets called when the ``+=`` augmented +assignment is executed, and its return value is what gets used in the assignment statement; -and (b) for lists, ``__iadd__`` is equivalent to calling ``extend`` on the list +and (b) for lists, :meth:`!__iadd__` is equivalent to calling :meth:`~list.extend` on the list and returning the list. That's why we say that for lists, ``+=`` is a -"shorthand" for ``list.extend``:: +"shorthand" for :meth:`!list.extend`:: >>> a_list = [] >>> a_list += [1] @@ -1363,7 +1382,7 @@ Thus, in our tuple example what is happening is equivalent to:: ... TypeError: 'tuple' object does not support item assignment -The ``__iadd__`` succeeds, and thus the list is extended, but even though +The :meth:`!__iadd__` succeeds, and thus the list is extended, but even though ``result`` points to the same object that ``a_tuple[0]`` already points to, that final assignment still results in an error, because tuples are immutable. @@ -1440,7 +1459,8 @@ See also :ref:`why-self`. How do I check if an object is an instance of a given class or of a subclass of it? ----------------------------------------------------------------------------------- -Use the built-in function ``isinstance(obj, cls)``. You can check if an object +Use the built-in function :func:`isinstance(obj, cls) <isinstance>`. You can +check if an object is an instance of any of a number of classes by providing a tuple instead of a single class, e.g. ``isinstance(obj, (class1, class2, ...))``, and can also check whether an object is one of Python's built-in types, e.g. @@ -1537,13 +1557,13 @@ Here the ``UpperOut`` class redefines the ``write()`` method to convert the argument string to uppercase before calling the underlying ``self._outfile.write()`` method. All other methods are delegated to the underlying ``self._outfile`` object. The delegation is accomplished via the -``__getattr__`` method; consult :ref:`the language reference <attribute-access>` +:meth:`~object.__getattr__` method; consult :ref:`the language reference <attribute-access>` for more information about controlling attribute access. Note that for more general cases delegation can get trickier. When attributes -must be set as well as retrieved, the class must define a :meth:`__setattr__` +must be set as well as retrieved, the class must define a :meth:`~object.__setattr__` method too, and it must do so carefully. The basic implementation of -:meth:`__setattr__` is roughly equivalent to the following:: +:meth:`!__setattr__` is roughly equivalent to the following:: class X: ... @@ -1551,7 +1571,8 @@ method too, and it must do so carefully. The basic implementation of self.__dict__[name] = value ... -Most :meth:`__setattr__` implementations must modify ``self.__dict__`` to store +Most :meth:`!__setattr__` implementations must modify +:meth:`self.__dict__ <object.__dict__>` to store local state for self without causing an infinite recursion. @@ -1689,17 +1710,17 @@ My class defines __del__ but it is not called when I delete the object. There are several possible reasons for this. 
-The del statement does not necessarily call :meth:`__del__` -- it simply +The :keyword:`del` statement does not necessarily call :meth:`~object.__del__` -- it simply decrements the object's reference count, and if this reaches zero -:meth:`__del__` is called. +:meth:`!__del__` is called. If your data structures contain circular links (e.g. a tree where each child has a parent reference and each parent has a list of children) the reference counts will never go back to zero. Once in a while Python runs an algorithm to detect such cycles, but the garbage collector might run some time after the last -reference to your data structure vanishes, so your :meth:`__del__` method may be +reference to your data structure vanishes, so your :meth:`!__del__` method may be called at an inconvenient and random time. This is inconvenient if you're trying -to reproduce a problem. Worse, the order in which object's :meth:`__del__` +to reproduce a problem. Worse, the order in which object's :meth:`!__del__` methods are executed is arbitrary. You can run :func:`gc.collect` to force a collection, but there *are* pathological cases where objects will never be collected. @@ -1707,7 +1728,7 @@ collected. Despite the cycle collector, it's still a good idea to define an explicit ``close()`` method on objects to be called whenever you're done with them. The ``close()`` method can then remove attributes that refer to subobjects. Don't -call :meth:`__del__` directly -- :meth:`__del__` should call ``close()`` and +call :meth:`!__del__` directly -- :meth:`!__del__` should call ``close()`` and ``close()`` should make sure that it can be called more than once for the same object. @@ -1724,7 +1745,7 @@ and sibling references (if they need them!). Normally, calling :func:`sys.exc_clear` will take care of this by clearing the last recorded exception. -Finally, if your :meth:`__del__` method raises an exception, a warning message +Finally, if your :meth:`!__del__` method raises an exception, a warning message is printed to :data:`sys.stderr`. @@ -1852,8 +1873,8 @@ For example, here is the implementation of How can a subclass control what data is stored in an immutable instance? ------------------------------------------------------------------------ -When subclassing an immutable type, override the :meth:`__new__` method -instead of the :meth:`__init__` method. The latter only runs *after* an +When subclassing an immutable type, override the :meth:`~object.__new__` method +instead of the :meth:`~object.__init__` method. The latter only runs *after* an instance is created, which is too late to alter data in an immutable instance. @@ -1955,8 +1976,8 @@ can't be made to work because it cannot detect changes to the attributes. To make the *lru_cache* approach work when the *station_id* is mutable, -the class needs to define the *__eq__* and *__hash__* methods so that -the cache can detect relevant attribute updates:: +the class needs to define the :meth:`~object.__eq__` and :meth:`~object.__hash__` +methods so that the cache can detect relevant attribute updates:: class Weather: "Example with a mutable station identifier" diff --git a/Doc/faq/windows.rst b/Doc/faq/windows.rst index 7768aafd979685..c0c92fdbbc84d1 100644 --- a/Doc/faq/windows.rst +++ b/Doc/faq/windows.rst @@ -167,7 +167,7 @@ How can I embed Python into a Windows application? Embedding the Python interpreter in a Windows app can be summarized as follows: -1. Do _not_ build Python into your .exe file directly. On Windows, Python must +1. 
Do **not** build Python into your .exe file directly. On Windows, Python must be a DLL to handle importing modules that are themselves DLL's. (This is the first key undocumented fact.) Instead, link to :file:`python{NN}.dll`; it is typically installed in ``C:\Windows\System``. *NN* is the Python version, a @@ -191,7 +191,7 @@ Embedding the Python interpreter in a Windows app can be summarized as follows: 2. If you use SWIG, it is easy to create a Python "extension module" that will make the app's data and methods available to Python. SWIG will handle just about all the grungy details for you. The result is C code that you link - *into* your .exe file (!) You do _not_ have to create a DLL file, and this + *into* your .exe file (!) You do **not** have to create a DLL file, and this also simplifies linking. 3. SWIG will create an init function (a C function) whose name depends on the @@ -218,10 +218,10 @@ Embedding the Python interpreter in a Windows app can be summarized as follows: 5. There are two problems with Python's C API which will become apparent if you use a compiler other than MSVC, the compiler used to build pythonNN.dll. - Problem 1: The so-called "Very High Level" functions that take FILE * + Problem 1: The so-called "Very High Level" functions that take ``FILE *`` arguments will not work in a multi-compiler environment because each - compiler's notion of a struct FILE will be different. From an implementation - standpoint these are very _low_ level functions. + compiler's notion of a ``struct FILE`` will be different. From an implementation + standpoint these are very low level functions. Problem 2: SWIG generates the following code when generating wrappers to void functions: diff --git a/Doc/howto/argparse.rst b/Doc/howto/argparse.rst index f3ad117a3d3bc6..f4d08e75d94642 100644 --- a/Doc/howto/argparse.rst +++ b/Doc/howto/argparse.rst @@ -761,9 +761,9 @@ your program, just in case they don't know:: if args.quiet: print(answer) elif args.verbose: - print("{} to the power {} equals {}".format(args.x, args.y, answer)) + print(f"{args.x} to the power {args.y} equals {answer}") else: - print("{}^{} == {}".format(args.x, args.y, answer)) + print(f"{args.x}^{args.y} == {answer}") Note that slight difference in the usage text. Note the ``[-v | -q]``, which tells us that we can either use ``-v`` or ``-q``, diff --git a/Doc/howto/enum.rst b/Doc/howto/enum.rst index 03f05657997cfd..4525acb04503b3 100644 --- a/Doc/howto/enum.rst +++ b/Doc/howto/enum.rst @@ -158,6 +158,7 @@ And a function to display the chores for a given day:: ... for chore, days in chores.items(): ... if day in days: ... print(chore) + ... >>> show_chores(chores_for_ethan, Weekday.SATURDAY) answer SO questions @@ -173,6 +174,7 @@ yourself some work and use :func:`auto()` for the values:: ... FRIDAY = auto() ... SATURDAY = auto() ... SUNDAY = auto() + ... WEEKEND = SATURDAY | SUNDAY .. _enum-advanced-tutorial: @@ -305,6 +307,10 @@ Iterating over the members of an enum does not provide the aliases:: >>> list(Shape) [<Shape.SQUARE: 2>, <Shape.DIAMOND: 1>, <Shape.CIRCLE: 3>] + >>> list(Weekday) + [<Weekday.MONDAY: 1>, <Weekday.TUESDAY: 2>, <Weekday.WEDNESDAY: 4>, <Weekday.THURSDAY: 8>, <Weekday.FRIDAY: 16>, <Weekday.SATURDAY: 32>, <Weekday.SUNDAY: 64>] + +Note that the aliases ``Shape.ALIAS_FOR_SQUARE`` and ``Weekday.WEEKEND`` aren't shown. The special attribute ``__members__`` is a read-only ordered mapping of names to members. 
It includes all names defined in the enumeration, including the @@ -324,6 +330,11 @@ the enumeration members. For example, finding all the aliases:: >>> [name for name, member in Shape.__members__.items() if member.name != name] ['ALIAS_FOR_SQUARE'] +.. note:: + + Aliases for flags include values with multiple flags set, such as ``3``, + and no flags set, i.e. ``0``. + Comparisons ----------- @@ -449,6 +460,31 @@ sense to allow sharing some common behavior between a group of enumerations. (See `OrderedEnum`_ for an example.) +.. _enum-dataclass-support: + +Dataclass support +----------------- + +When inheriting from a :class:`~dataclasses.dataclass`, +the :meth:`~Enum.__repr__` omits the inherited class' name. For example:: + + >>> @dataclass + ... class CreatureDataMixin: + ... size: str + ... legs: int + ... tail: bool = field(repr=False, default=True) + ... + >>> class Creature(CreatureDataMixin, Enum): + ... BEETLE = 'small', 6 + ... DOG = 'medium', 4 + ... + >>> Creature.DOG + <Creature.DOG: size='medium', legs=4> + +Use the :func:`!dataclass` argument ``repr=False`` +to use the standard :func:`repr`. + + Pickling -------- @@ -677,6 +713,7 @@ It is also possible to name the combinations:: ... W = 2 ... X = 1 ... RWX = 7 + ... >>> Perm.RWX <Perm.RWX: 7> >>> ~Perm.RWX @@ -751,7 +788,7 @@ flags being set, the boolean evaluation is :data:`False`:: False Individual flags should have values that are powers of two (1, 2, 4, 8, ...), -while combinations of flags won't:: +while combinations of flags will not:: >>> class Color(Flag): ... RED = auto() @@ -1096,8 +1133,8 @@ example of when ``KEEP`` is needed). .. _enum-class-differences: -How are Enums different? ------------------------- +How are Enums and Flags different? +---------------------------------- Enums have a custom metaclass that affects many aspects of both derived :class:`Enum` classes and their instances (members). @@ -1114,6 +1151,13 @@ responsible for ensuring that various other methods on the final :class:`Enum` class are correct (such as :meth:`__new__`, :meth:`__getnewargs__`, :meth:`__str__` and :meth:`__repr__`). +Flag Classes +^^^^^^^^^^^^ + +Flags have an expanded view of aliasing: to be canonical, the value of a flag +needs to be a power-of-two value, and not a duplicate name. So, in addition to the +:class:`Enum` definition of alias, a flag with no value (a.k.a. ``0``) or with more than one +power-of-two value (e.g. ``3``) is considered an alias. Enum Members (aka instances) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1123,9 +1167,35 @@ The most interesting thing about enum members is that they are singletons. and then puts a custom :meth:`__new__` in place to ensure that no new ones are ever instantiated by returning only the existing member instances. +Flag Members +^^^^^^^^^^^^ + +Flag members can be iterated over just like the :class:`Flag` class, and only the +canonical members will be returned. For example:: + + >>> list(Color) + [<Color.RED: 1>, <Color.GREEN: 2>, <Color.BLUE: 4>] + +(Note that ``BLACK``, ``PURPLE``, and ``WHITE`` do not show up.) + +Inverting a flag member returns the corresponding positive value, +rather than a negative value --- for example:: + + >>> ~Color.RED + <Color.GREEN|BLUE: 6> + +Flag members have a length corresponding to the number of power-of-two values +they contain. For example:: + + >>> len(Color.PURPLE) + 2 + .. 
_enum-cookbook: +Enum Cookbook +------------- + While :class:`Enum`, :class:`IntEnum`, :class:`StrEnum`, :class:`Flag`, and :class:`IntFlag` are expected to cover the majority of use-cases, they cannot @@ -1299,7 +1369,7 @@ enumerations):: DuplicateFreeEnum ^^^^^^^^^^^^^^^^^ -Raises an error if a duplicate member name is found instead of creating an +Raises an error if a duplicate member value is found instead of creating an alias:: >>> class DuplicateFreeEnum(Enum): diff --git a/Doc/howto/logging-cookbook.rst b/Doc/howto/logging-cookbook.rst index ae101e3cdf2501..bf6f54a841a7b9 100644 --- a/Doc/howto/logging-cookbook.rst +++ b/Doc/howto/logging-cookbook.rst @@ -765,13 +765,71 @@ serialization. Running a logging socket listener in production ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -To run a logging listener in production, you may need to use a process-management tool -such as `Supervisor <http://supervisord.org/>`_. `Here -<https://gist.github.com/vsajip/4b227eeec43817465ca835ca66f75e2b>`_ is a Gist which -provides the bare-bones files to run the above functionality using Supervisor: you -will need to change the ``/path/to/`` parts in the Gist to reflect the actual paths you -want to use. - +.. _socket-listener-gist: https://gist.github.com/vsajip/4b227eeec43817465ca835ca66f75e2b + +To run a logging listener in production, you may need to use a +process-management tool such as `Supervisor <http://supervisord.org/>`_. +`Here is a Gist <socket-listener-gist_>`__ +which provides the bare-bones files to run the above functionality using +Supervisor. It consists of the following files: + ++-------------------------+----------------------------------------------------+ +| File | Purpose | ++=========================+====================================================+ +| :file:`prepare.sh` | A Bash script to prepare the environment for | +| | testing | ++-------------------------+----------------------------------------------------+ +| :file:`supervisor.conf` | The Supervisor configuration file, which has | +| | entries for the listener and a multi-process web | +| | application | ++-------------------------+----------------------------------------------------+ +| :file:`ensure_app.sh` | A Bash script to ensure that Supervisor is running | +| | with the above configuration | ++-------------------------+----------------------------------------------------+ +| :file:`log_listener.py` | The socket listener program which receives log | +| | events and records them to a file | ++-------------------------+----------------------------------------------------+ +| :file:`main.py` | A simple web application which performs logging | +| | via a socket connected to the listener | ++-------------------------+----------------------------------------------------+ +| :file:`webapp.json` | A JSON configuration file for the web application | ++-------------------------+----------------------------------------------------+ +| :file:`client.py` | A Python script to exercise the web application | ++-------------------------+----------------------------------------------------+ + +The web application uses `Gunicorn <https://gunicorn.org/>`_, which is a +popular web application server that starts multiple worker processes to handle +requests. This example setup shows how the workers can write to the same log file +without conflicting with one another --- they all go through the socket listener. + +To test these files, do the following in a POSIX environment: + +#. 
Download `the Gist <socket-listener-gist_>`__ + as a ZIP archive using the :guilabel:`Download ZIP` button. + +#. Unzip the above files from the archive into a scratch directory. + +#. In the scratch directory, run ``bash prepare.sh`` to get things ready. + This creates a :file:`run` subdirectory to contain Supervisor-related and + log files, and a :file:`venv` subdirectory to contain a virtual environment + into which ``bottle``, ``gunicorn`` and ``supervisor`` are installed. + +#. Run ``bash ensure_app.sh`` to ensure that Supervisor is running with + the above configuration. + +#. Run ``venv/bin/python client.py`` to exercise the web application, + which will lead to records being written to the log. + +#. Inspect the log files in the :file:`run` subdirectory. You should see the + most recent log lines in files matching the pattern :file:`app.log*`. They won't be in + any particular order, since they have been handled concurrently by different + worker processes in a non-deterministic way. + +#. You can shut down the listener and the web application by running + ``venv/bin/supervisorctl -c supervisor.conf shutdown``. + +You may need to tweak the configuration files in the unlikely event that the +configured ports clash with something else in your test environment. .. _context-info: diff --git a/Doc/includes/custom2.c b/Doc/includes/custom2.c index 2a3c59f8f04c3d..a3b2d6ab78d3c4 100644 --- a/Doc/includes/custom2.c +++ b/Doc/includes/custom2.c @@ -1,6 +1,6 @@ #define PY_SSIZE_T_CLEAN #include <Python.h> -#include "structmember.h" +#include <stddef.h> /* for offsetof() */ typedef struct { PyObject_HEAD @@ -42,7 +42,7 @@ static int Custom_init(CustomObject *self, PyObject *args, PyObject *kwds) { static char *kwlist[] = {"first", "last", "number", NULL}; - PyObject *first = NULL, *last = NULL, *tmp; + PyObject *first = NULL, *last = NULL; if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOi", kwlist, &first, &last, @@ -50,26 +50,20 @@ Custom_init(CustomObject *self, PyObject *args, PyObject *kwds) return -1; if (first) { - tmp = self->first; - Py_INCREF(first); - self->first = first; - Py_XDECREF(tmp); + Py_XSETREF(self->first, Py_NewRef(first)); } if (last) { - tmp = self->last; - Py_INCREF(last); - self->last = last; - Py_XDECREF(tmp); + Py_XSETREF(self->last, Py_NewRef(last)); } return 0; } static PyMemberDef Custom_members[] = { - {"first", T_OBJECT_EX, offsetof(CustomObject, first), 0, + {"first", Py_T_OBJECT_EX, offsetof(CustomObject, first), 0, "first name"}, - {"last", T_OBJECT_EX, offsetof(CustomObject, last), 0, + {"last", Py_T_OBJECT_EX, offsetof(CustomObject, last), 0, "last name"}, - {"number", T_INT, offsetof(CustomObject, number), 0, + {"number", Py_T_INT, offsetof(CustomObject, number), 0, "custom number"}, {NULL} /* Sentinel */ }; @@ -127,9 +121,7 @@ PyInit_custom2(void) if (m == NULL) return NULL; - Py_INCREF(&CustomType); - if (PyModule_AddObject(m, "Custom", (PyObject *) &CustomType) < 0) { - Py_DECREF(&CustomType); + if (PyModule_AddObjectRef(m, "Custom", (PyObject *) &CustomType) < 0) { Py_DECREF(m); return NULL; } diff --git a/Doc/includes/custom3.c b/Doc/includes/custom3.c index 5a47530f0a6b0d..1a68bc4be8c399 100644 --- a/Doc/includes/custom3.c +++ b/Doc/includes/custom3.c @@ -1,6 +1,6 @@ #define PY_SSIZE_T_CLEAN #include <Python.h> -#include "structmember.h" +#include <stddef.h> /* for offsetof() */ typedef struct { PyObject_HEAD @@ -42,7 +42,7 @@ static int Custom_init(CustomObject *self, PyObject *args, PyObject *kwds) { static char *kwlist[] = {"first", "last", 
"number", NULL}; - PyObject *first = NULL, *last = NULL, *tmp; + PyObject *first = NULL, *last = NULL; if (!PyArg_ParseTupleAndKeywords(args, kwds, "|UUi", kwlist, &first, &last, @@ -50,22 +50,16 @@ Custom_init(CustomObject *self, PyObject *args, PyObject *kwds) return -1; if (first) { - tmp = self->first; - Py_INCREF(first); - self->first = first; - Py_DECREF(tmp); + Py_SETREF(self->first, Py_NewRef(first)); } if (last) { - tmp = self->last; - Py_INCREF(last); - self->last = last; - Py_DECREF(tmp); + Py_SETREF(self->last, Py_NewRef(last)); } return 0; } static PyMemberDef Custom_members[] = { - {"number", T_INT, offsetof(CustomObject, number), 0, + {"number", Py_T_INT, offsetof(CustomObject, number), 0, "custom number"}, {NULL} /* Sentinel */ }; @@ -73,14 +67,12 @@ static PyMemberDef Custom_members[] = { static PyObject * Custom_getfirst(CustomObject *self, void *closure) { - Py_INCREF(self->first); - return self->first; + return Py_NewRef(self->first); } static int Custom_setfirst(CustomObject *self, PyObject *value, void *closure) { - PyObject *tmp; if (value == NULL) { PyErr_SetString(PyExc_TypeError, "Cannot delete the first attribute"); return -1; @@ -90,24 +82,19 @@ Custom_setfirst(CustomObject *self, PyObject *value, void *closure) "The first attribute value must be a string"); return -1; } - tmp = self->first; - Py_INCREF(value); - self->first = value; - Py_DECREF(tmp); + Py_SETREF(self->first, Py_NewRef(value)); return 0; } static PyObject * Custom_getlast(CustomObject *self, void *closure) { - Py_INCREF(self->last); - return self->last; + return Py_NewRef(self->last); } static int Custom_setlast(CustomObject *self, PyObject *value, void *closure) { - PyObject *tmp; if (value == NULL) { PyErr_SetString(PyExc_TypeError, "Cannot delete the last attribute"); return -1; @@ -117,10 +104,7 @@ Custom_setlast(CustomObject *self, PyObject *value, void *closure) "The last attribute value must be a string"); return -1; } - tmp = self->last; - Py_INCREF(value); - self->last = value; - Py_DECREF(tmp); + Py_SETREF(self->last, Py_NewRef(value)); return 0; } @@ -178,9 +162,7 @@ PyInit_custom3(void) if (m == NULL) return NULL; - Py_INCREF(&CustomType); - if (PyModule_AddObject(m, "Custom", (PyObject *) &CustomType) < 0) { - Py_DECREF(&CustomType); + if (PyModule_AddObjectRef(m, "Custom", (PyObject *) &CustomType) < 0) { Py_DECREF(m); return NULL; } diff --git a/Doc/includes/custom4.c b/Doc/includes/custom4.c index c7ee55578488ed..b932d159d26e93 100644 --- a/Doc/includes/custom4.c +++ b/Doc/includes/custom4.c @@ -1,6 +1,6 @@ #define PY_SSIZE_T_CLEAN #include <Python.h> -#include "structmember.h" +#include <stddef.h> /* for offsetof() */ typedef struct { PyObject_HEAD @@ -58,7 +58,7 @@ static int Custom_init(CustomObject *self, PyObject *args, PyObject *kwds) { static char *kwlist[] = {"first", "last", "number", NULL}; - PyObject *first = NULL, *last = NULL, *tmp; + PyObject *first = NULL, *last = NULL; if (!PyArg_ParseTupleAndKeywords(args, kwds, "|UUi", kwlist, &first, &last, @@ -66,22 +66,16 @@ Custom_init(CustomObject *self, PyObject *args, PyObject *kwds) return -1; if (first) { - tmp = self->first; - Py_INCREF(first); - self->first = first; - Py_DECREF(tmp); + Py_SETREF(self->first, Py_NewRef(first)); } if (last) { - tmp = self->last; - Py_INCREF(last); - self->last = last; - Py_DECREF(tmp); + Py_SETREF(self->last, Py_NewRef(last)); } return 0; } static PyMemberDef Custom_members[] = { - {"number", T_INT, offsetof(CustomObject, number), 0, + {"number", Py_T_INT, offsetof(CustomObject, 
number), 0, "custom number"}, {NULL} /* Sentinel */ }; @@ -89,8 +83,7 @@ static PyMemberDef Custom_members[] = { static PyObject * Custom_getfirst(CustomObject *self, void *closure) { - Py_INCREF(self->first); - return self->first; + return Py_NewRef(self->first); } static int @@ -105,17 +98,14 @@ Custom_setfirst(CustomObject *self, PyObject *value, void *closure) "The first attribute value must be a string"); return -1; } - Py_INCREF(value); - Py_CLEAR(self->first); - self->first = value; + Py_XSETREF(self->first, Py_NewRef(value)); return 0; } static PyObject * Custom_getlast(CustomObject *self, void *closure) { - Py_INCREF(self->last); - return self->last; + return Py_NewRef(self->last); } static int @@ -130,9 +120,7 @@ Custom_setlast(CustomObject *self, PyObject *value, void *closure) "The last attribute value must be a string"); return -1; } - Py_INCREF(value); - Py_CLEAR(self->last); - self->last = value; + Py_XSETREF(self->last, Py_NewRef(value)); return 0; } @@ -192,9 +180,7 @@ PyInit_custom4(void) if (m == NULL) return NULL; - Py_INCREF(&CustomType); - if (PyModule_AddObject(m, "Custom", (PyObject *) &CustomType) < 0) { - Py_DECREF(&CustomType); + if (PyModule_AddObjectRef(m, "Custom", (PyObject *) &CustomType) < 0) { Py_DECREF(m); return NULL; } diff --git a/Doc/includes/typestruct.h b/Doc/includes/typestruct.h index 02f8ccfe4438a5..f0ad1e47cb0d86 100644 --- a/Doc/includes/typestruct.h +++ b/Doc/includes/typestruct.h @@ -80,4 +80,7 @@ typedef struct _typeobject { destructor tp_finalize; vectorcallfunc tp_vectorcall; + + /* bitset of which type-watchers care about this type */ + char tp_watched; } PyTypeObject; diff --git a/Doc/library/_thread.rst b/Doc/library/_thread.rst index 9df9e7914e093b..122692a428594f 100644 --- a/Doc/library/_thread.rst +++ b/Doc/library/_thread.rst @@ -57,6 +57,8 @@ This module defines the following constants and functions: When the function raises a :exc:`SystemExit` exception, it is silently ignored. + .. audit-event:: _thread.start_new_thread function,args,kwargs start_new_thread + .. versionchanged:: 3.8 :func:`sys.unraisablehook` is now used to handle unhandled exceptions. diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst index c55d94421e5b14..e6c96486492572 100644 --- a/Doc/library/argparse.rst +++ b/Doc/library/argparse.rst @@ -565,6 +565,7 @@ arguments they contain. For example:: >>> with open('args.txt', 'w', encoding=sys.getfilesystemencoding()) as fp: ... fp.write('-f\nbar') + ... >>> parser = argparse.ArgumentParser(fromfile_prefix_chars='@') >>> parser.add_argument('-f') >>> parser.parse_args(['-f', 'foo', '@args.txt']) @@ -1945,7 +1946,7 @@ Argument groups .. method:: ArgumentParser.add_argument_group(title=None, description=None) By default, :class:`ArgumentParser` groups command-line arguments into - "positional arguments" and "optional arguments" when displaying help + "positional arguments" and "options" when displaying help messages. When there is a better conceptual grouping of arguments than this default one, appropriate groups can be created using the :meth:`add_argument_group` method:: diff --git a/Doc/library/asynchat.rst b/Doc/library/asynchat.rst deleted file mode 100644 index 32e04ad6d19533..00000000000000 --- a/Doc/library/asynchat.rst +++ /dev/null @@ -1,217 +0,0 @@ -:mod:`asynchat` --- Asynchronous socket command/response handler -================================================================ - -.. module:: asynchat - :synopsis: Support for asynchronous command/response protocols. - :deprecated: - -.. 
moduleauthor:: Sam Rushing <rushing@nightmare.com> -.. sectionauthor:: Steve Holden <sholden@holdenweb.com> - -**Source code:** :source:`Lib/asynchat.py` - -.. deprecated-removed:: 3.6 3.12 - The :mod:`asynchat` module is deprecated - (see :pep:`PEP 594 <594#asynchat>` for details). - Please use :mod:`asyncio` instead. - --------------- - -.. note:: - - This module exists for backwards compatibility only. For new code we - recommend using :mod:`asyncio`. - -This module builds on the :mod:`asyncore` infrastructure, simplifying -asynchronous clients and servers and making it easier to handle protocols -whose elements are terminated by arbitrary strings, or are of variable length. -:mod:`asynchat` defines the abstract class :class:`async_chat` that you -subclass, providing implementations of the :meth:`collect_incoming_data` and -:meth:`found_terminator` methods. It uses the same asynchronous loop as -:mod:`asyncore`, and the two types of channel, :class:`asyncore.dispatcher` -and :class:`asynchat.async_chat`, can freely be mixed in the channel map. -Typically an :class:`asyncore.dispatcher` server channel generates new -:class:`asynchat.async_chat` channel objects as it receives incoming -connection requests. - -.. include:: ../includes/wasm-notavail.rst - -.. class:: async_chat() - - This class is an abstract subclass of :class:`asyncore.dispatcher`. To make - practical use of the code you must subclass :class:`async_chat`, providing - meaningful :meth:`collect_incoming_data` and :meth:`found_terminator` - methods. - The :class:`asyncore.dispatcher` methods can be used, although not all make - sense in a message/response context. - - Like :class:`asyncore.dispatcher`, :class:`async_chat` defines a set of - events that are generated by an analysis of socket conditions after a - :c:func:`select` call. Once the polling loop has been started the - :class:`async_chat` object's methods are called by the event-processing - framework with no action on the part of the programmer. - - Two class attributes can be modified, to improve performance, or possibly - even to conserve memory. - - - .. data:: ac_in_buffer_size - - The asynchronous input buffer size (default ``4096``). - - - .. data:: ac_out_buffer_size - - The asynchronous output buffer size (default ``4096``). - - Unlike :class:`asyncore.dispatcher`, :class:`async_chat` allows you to - define a :abbr:`FIFO (first-in, first-out)` queue of *producers*. A producer need - have only one method, :meth:`more`, which should return data to be - transmitted on the channel. - The producer indicates exhaustion (*i.e.* that it contains no more data) by - having its :meth:`more` method return the empty bytes object. At this point - the :class:`async_chat` object removes the producer from the queue and starts - using the next producer, if any. When the producer queue is empty the - :meth:`handle_write` method does nothing. You use the channel object's - :meth:`set_terminator` method to describe how to recognize the end of, or - an important breakpoint in, an incoming transmission from the remote - endpoint. - - To build a functioning :class:`async_chat` subclass your input methods - :meth:`collect_incoming_data` and :meth:`found_terminator` must handle the - data that the channel receives asynchronously. The methods are described - below. - - -.. method:: async_chat.close_when_done() - - Pushes a ``None`` on to the producer queue. When this producer is popped off - the queue it causes the channel to be closed. - - -.. 
method:: async_chat.collect_incoming_data(data) - - Called with *data* holding an arbitrary amount of received data. The - default method, which must be overridden, raises a - :exc:`NotImplementedError` exception. - - -.. method:: async_chat.discard_buffers() - - In emergencies this method will discard any data held in the input and/or - output buffers and the producer queue. - - -.. method:: async_chat.found_terminator() - - Called when the incoming data stream matches the termination condition set - by :meth:`set_terminator`. The default method, which must be overridden, - raises a :exc:`NotImplementedError` exception. The buffered input data - should be available via an instance attribute. - - -.. method:: async_chat.get_terminator() - - Returns the current terminator for the channel. - - -.. method:: async_chat.push(data) - - Pushes data on to the channel's queue to ensure its transmission. - This is all you need to do to have the channel write the data out to the - network, although it is possible to use your own producers in more complex - schemes to implement encryption and chunking, for example. - - -.. method:: async_chat.push_with_producer(producer) - - Takes a producer object and adds it to the producer queue associated with - the channel. When all currently pushed producers have been exhausted the - channel will consume this producer's data by calling its :meth:`more` - method and send the data to the remote endpoint. - - -.. method:: async_chat.set_terminator(term) - - Sets the terminating condition to be recognized on the channel. ``term`` - may be any of three types of value, corresponding to three different ways - to handle incoming protocol data. - - +-----------+---------------------------------------------+ - | term | Description | - +===========+=============================================+ - | *string* | Will call :meth:`found_terminator` when the | - | | string is found in the input stream | - +-----------+---------------------------------------------+ - | *integer* | Will call :meth:`found_terminator` when the | - | | indicated number of characters have been | - | | received | - +-----------+---------------------------------------------+ - | ``None`` | The channel continues to collect data | - | | forever | - +-----------+---------------------------------------------+ - - Note that any data following the terminator will be available for reading - by the channel after :meth:`found_terminator` is called. - - -.. _asynchat-example: - -asynchat Example ----------------- - -The following partial example shows how HTTP requests can be read with -:class:`async_chat`. A web server might create an -:class:`http_request_handler` object for each incoming client connection. -Notice that initially the channel terminator is set to match the blank line at -the end of the HTTP headers, and a flag indicates that the headers are being -read. - -Once the headers have been read, if the request is of type POST (indicating -that further data are present in the input stream) then the -``Content-Length:`` header is used to set a numeric terminator to read the -right amount of data from the channel. - -The :meth:`handle_request` method is called once all relevant input has been -marshalled, after setting the channel terminator to ``None`` to ensure that -any extraneous data sent by the web client are ignored. 
:: - - - import asynchat - - class http_request_handler(asynchat.async_chat): - - def __init__(self, sock, addr, sessions, log): - asynchat.async_chat.__init__(self, sock=sock) - self.addr = addr - self.sessions = sessions - self.ibuffer = [] - self.obuffer = b"" - self.set_terminator(b"\r\n\r\n") - self.reading_headers = True - self.handling = False - self.cgi_data = None - self.log = log - - def collect_incoming_data(self, data): - """Buffer the data""" - self.ibuffer.append(data) - - def found_terminator(self): - if self.reading_headers: - self.reading_headers = False - self.parse_headers(b"".join(self.ibuffer)) - self.ibuffer = [] - if self.op.upper() == b"POST": - clen = self.headers.getheader("content-length") - self.set_terminator(int(clen)) - else: - self.handling = True - self.set_terminator(None) - self.handle_request() - elif not self.handling: - self.set_terminator(None) # browsers sometimes over-send - self.cgi_data = parse(self.headers, b"".join(self.ibuffer)) - self.handling = True - self.ibuffer = [] - self.handle_request() diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index 4f12074dd70098..fd47b0c24d8a16 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -33,7 +33,8 @@ an event loop: Return the running event loop in the current OS thread. - If there is no running event loop a :exc:`RuntimeError` is raised. + Raise a :exc:`RuntimeError` if there is no running event loop. + This function can only be called from a coroutine or a callback. .. versionadded:: 3.7 @@ -42,27 +43,31 @@ an event loop: Get the current event loop. - If there is no current event loop set in the current OS thread, - the OS thread is main, and :func:`set_event_loop` has not yet - been called, asyncio will create a new event loop and set it as the - current one. + When called from a coroutine or a callback (e.g. scheduled with + call_soon or similar API), this function will always return the + running event loop. + + If there is no running event loop set, the function will return + the result of calling ``get_event_loop_policy().get_event_loop()``. Because this function has rather complex behavior (especially when custom event loop policies are in use), using the :func:`get_running_loop` function is preferred to :func:`get_event_loop` in coroutines and callbacks. - Consider also using the :func:`asyncio.run` function instead of using - lower level functions to manually create and close an event loop. + As noted above, consider using the higher-level :func:`asyncio.run` function, + instead of using these lower level functions to manually create and close an + event loop. - .. deprecated:: 3.10 - Deprecation warning is emitted if there is no running event loop. - In future Python releases, this function will be an alias of - :func:`get_running_loop`. + .. note:: + In Python versions 3.10.0--3.10.8 and 3.11.0 this function + (and other functions which used it implicitly) emitted a + :exc:`DeprecationWarning` if there was no running event loop, even if + the current loop was set. .. function:: set_event_loop(loop) - Set *loop* as a current event loop for the current OS thread. + Set *loop* as the current event loop for the current OS thread. .. function:: new_event_loop() @@ -877,9 +882,14 @@ TLS Upgrade Upgrade an existing transport-based connection to TLS. - Return a new transport instance, that the *protocol* must start using - immediately after the *await*. 
The *transport* instance passed to - the *start_tls* method should never be used again. + Create a TLS coder/decoder instance and insert it between the *transport* + and the *protocol*. The coder/decoder implements both *transport*-facing + protocol and *protocol*-facing transport. + + Return the created two-interface instance. After *await*, the *protocol* + must stop using the original *transport* and communicate with the returned + object only because the coder caches *protocol*-side data and sporadically + exchanges extra TLS session packets with *transport*. Parameters: diff --git a/Doc/library/asyncio-llapi-index.rst b/Doc/library/asyncio-llapi-index.rst index b7ad888a7b67ab..9ce48a24444e66 100644 --- a/Doc/library/asyncio-llapi-index.rst +++ b/Doc/library/asyncio-llapi-index.rst @@ -19,7 +19,7 @@ Obtaining the Event Loop - The **preferred** function to get the running event loop. * - :func:`asyncio.get_event_loop` - - Get an event loop instance (current or via the policy). + - Get an event loop instance (running or current via the current policy). * - :func:`asyncio.set_event_loop` - Set the event loop as current via the current policy. diff --git a/Doc/library/asyncio-policy.rst b/Doc/library/asyncio-policy.rst index 052378ef32743b..ccd95244947534 100644 --- a/Doc/library/asyncio-policy.rst +++ b/Doc/library/asyncio-policy.rst @@ -116,6 +116,10 @@ asyncio ships with the following built-in policies: On Windows, :class:`ProactorEventLoop` is now used by default. + .. versionchanged:: 3.12 + :meth:`get_event_loop` now raises a :exc:`RuntimeError` if there is no + current event loop set. + .. class:: WindowsSelectorEventLoopPolicy @@ -222,6 +226,9 @@ implementation used by the asyncio event loop: This method has to be called to ensure that underlying resources are cleaned-up. + .. deprecated:: 3.12 + + .. class:: ThreadedChildWatcher This implementation starts a new waiting thread for every subprocess spawn. diff --git a/Doc/library/asyncio-runner.rst b/Doc/library/asyncio-runner.rst index c43d664eba717f..b68b2570ef071e 100644 --- a/Doc/library/asyncio-runner.rst +++ b/Doc/library/asyncio-runner.rst @@ -22,7 +22,7 @@ to simplify async code usage for common wide-spread scenarios. Running an asyncio Program ========================== -.. function:: run(coro, *, debug=None) +.. function:: run(coro, *, debug=None, loop_factory=None) Execute the :term:`coroutine` *coro* and return the result. @@ -37,9 +37,11 @@ Running an asyncio Program debug mode explicitly. ``None`` is used to respect the global :ref:`asyncio-debug-mode` settings. - This function always creates a new event loop and closes it at - the end. It should be used as a main entry point for asyncio - programs, and should ideally only be called once. + If *loop_factory* is not ``None``, it is used to create a new event loop; + otherwise :func:`asyncio.new_event_loop` is used. The loop is closed at the end. + This function should be used as a main entry point for asyncio programs, + and should ideally only be called once. It is recommended to use + *loop_factory* to configure the event loop instead of policies. The executor is given a timeout duration of 5 minutes to shutdown. If the executor hasn't finished within that duration, a warning is @@ -62,6 +64,10 @@ Running an asyncio Program *debug* is ``None`` by default to respect the global debug mode settings. + .. versionchanged:: 3.12 + + Added *loop_factory* parameter. 
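A minimal sketch, not taken from the patch above, of how the new *loop_factory* parameter of :func:`asyncio.run` might be used, assuming the Python 3.12 signature documented in the preceding hunk; the coroutine and the choice of :func:`asyncio.new_event_loop` as the factory are illustrative only::

    import asyncio

    async def main():
        # Illustrative coroutine; any awaitable entry point would do.
        await asyncio.sleep(0.1)
        return "done"

    # Pass a callable that builds the event loop instead of relying on the
    # event loop policy.  asyncio.new_event_loop serves here only as an
    # example factory; a real application might return a pre-configured loop.
    result = asyncio.run(main(), loop_factory=asyncio.new_event_loop)
    print(result)

In practice the factory would return a loop configured the way an event loop policy used to configure it, which is the substitution the hunk above recommends.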
+ Runner context manager ====================== diff --git a/Doc/library/asyncio-stream.rst b/Doc/library/asyncio-stream.rst index d87e3c042c9977..c1ae8abb9abcd5 100644 --- a/Doc/library/asyncio-stream.rst +++ b/Doc/library/asyncio-stream.rst @@ -52,6 +52,7 @@ and work with streams: limit=None, ssl=None, family=0, proto=0, \ flags=0, sock=None, local_addr=None, \ server_hostname=None, ssl_handshake_timeout=None, \ + ssl_shutdown_timeout=None, \ happy_eyeballs_delay=None, interleave=None) Establish a network connection and return a pair of @@ -82,6 +83,9 @@ and work with streams: .. versionchanged:: 3.10 Removed the *loop* parameter. + .. versionchanged:: 3.11 + Added the *ssl_shutdown_timeout* parameter. + .. coroutinefunction:: start_server(client_connected_cb, host=None, \ port=None, *, limit=None, \ @@ -89,7 +93,7 @@ and work with streams: flags=socket.AI_PASSIVE, sock=None, \ backlog=100, ssl=None, reuse_address=None, \ reuse_port=None, ssl_handshake_timeout=None, \ - start_serving=True) + ssl_shutdown_timeout=None, start_serving=True) Start a socket server. @@ -121,12 +125,15 @@ and work with streams: .. versionchanged:: 3.10 Removed the *loop* parameter. + .. versionchanged:: 3.11 + Added the *ssl_shutdown_timeout* parameter. + .. rubric:: Unix Sockets .. coroutinefunction:: open_unix_connection(path=None, *, limit=None, \ ssl=None, sock=None, server_hostname=None, \ - ssl_handshake_timeout=None) + ssl_handshake_timeout=None, ssl_shutdown_timeout=None) Establish a Unix socket connection and return a pair of ``(reader, writer)``. @@ -150,10 +157,14 @@ and work with streams: .. versionchanged:: 3.10 Removed the *loop* parameter. + .. versionchanged:: 3.11 + Added the *ssl_shutdown_timeout* parameter. + .. coroutinefunction:: start_unix_server(client_connected_cb, path=None, \ *, limit=None, sock=None, backlog=100, ssl=None, \ - ssl_handshake_timeout=None, start_serving=True) + ssl_handshake_timeout=None, \ + ssl_shutdown_timeout=None, start_serving=True) Start a Unix socket server. @@ -176,6 +187,9 @@ and work with streams: .. versionchanged:: 3.10 Removed the *loop* parameter. + .. versionchanged:: 3.11 + Added the *ssl_shutdown_timeout* parameter. + StreamReader ============ diff --git a/Doc/library/asyncio-subprocess.rst b/Doc/library/asyncio-subprocess.rst index 28d0b21e8180b6..4274638c5e8625 100644 --- a/Doc/library/asyncio-subprocess.rst +++ b/Doc/library/asyncio-subprocess.rst @@ -175,7 +175,7 @@ their completion. * the :meth:`~asyncio.subprocess.Process.communicate` and :meth:`~asyncio.subprocess.Process.wait` methods don't have a - *timeout* parameter: use the :func:`wait_for` function; + *timeout* parameter: use the :func:`~asyncio.wait_for` function; * the :meth:`Process.wait() <asyncio.subprocess.Process.wait>` method is asynchronous, whereas :meth:`subprocess.Popen.wait` method diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst index fb6d23fda03e41..631a5ddc1f6500 100644 --- a/Doc/library/asyncio-task.rst +++ b/Doc/library/asyncio-task.rst @@ -18,6 +18,10 @@ and Tasks. Coroutines ========== +**Source code:** :source:`Lib/asyncio/coroutines.py` + +---------------------------------------------------- + :term:`Coroutines <coroutine>` declared with the async/await syntax is the preferred way of writing asyncio applications. For example, the following snippet of code prints "hello", waits 1 second, @@ -230,6 +234,10 @@ is :meth:`loop.run_in_executor`. 
Creating Tasks ============== +**Source code:** :source:`Lib/asyncio/tasks.py` + +----------------------------------------------- + .. function:: create_task(coro, *, name=None, context=None) Wrap the *coro* :ref:`coroutine <coroutine>` into a :class:`Task` diff --git a/Doc/library/asyncore.rst b/Doc/library/asyncore.rst deleted file mode 100644 index a3a4e90d05277e..00000000000000 --- a/Doc/library/asyncore.rst +++ /dev/null @@ -1,365 +0,0 @@ -:mod:`asyncore` --- Asynchronous socket handler -=============================================== - -.. module:: asyncore - :synopsis: A base class for developing asynchronous socket handling - services. - :deprecated: - -.. moduleauthor:: Sam Rushing <rushing@nightmare.com> -.. sectionauthor:: Christopher Petrilli <petrilli@amber.org> -.. sectionauthor:: Steve Holden <sholden@holdenweb.com> -.. heavily adapted from original documentation by Sam Rushing - -**Source code:** :source:`Lib/asyncore.py` - -.. deprecated-removed:: 3.6 3.12 - The :mod:`asyncore` module is deprecated - (see :pep:`PEP 594 <594#asyncore>` for details). - Please use :mod:`asyncio` instead. - --------------- - -.. note:: - - This module exists for backwards compatibility only. For new code we - recommend using :mod:`asyncio`. - -This module provides the basic infrastructure for writing asynchronous socket -service clients and servers. - -.. include:: ../includes/wasm-notavail.rst - -There are only two ways to have a program on a single processor do "more than -one thing at a time." Multi-threaded programming is the simplest and most -popular way to do it, but there is another very different technique, that lets -you have nearly all the advantages of multi-threading, without actually using -multiple threads. It's really only practical if your program is largely I/O -bound. If your program is processor bound, then pre-emptive scheduled threads -are probably what you really need. Network servers are rarely processor -bound, however. - -If your operating system supports the :c:func:`select` system call in its I/O -library (and nearly all do), then you can use it to juggle multiple -communication channels at once; doing other work while your I/O is taking -place in the "background." Although this strategy can seem strange and -complex, especially at first, it is in many ways easier to understand and -control than multi-threaded programming. The :mod:`asyncore` module solves -many of the difficult problems for you, making the task of building -sophisticated high-performance network servers and clients a snap. For -"conversational" applications and protocols the companion :mod:`asynchat` -module is invaluable. - -The basic idea behind both modules is to create one or more network -*channels*, instances of class :class:`asyncore.dispatcher` and -:class:`asynchat.async_chat`. Creating the channels adds them to a global -map, used by the :func:`loop` function if you do not provide it with your own -*map*. - -Once the initial channel(s) is(are) created, calling the :func:`loop` function -activates channel service, which continues until the last channel (including -any that have been added to the map during asynchronous service) is closed. - - -.. function:: loop([timeout[, use_poll[, map[,count]]]]) - - Enter a polling loop that terminates after count passes or all open - channels have been closed. All arguments are optional. The *count* - parameter defaults to ``None``, resulting in the loop terminating only when all - channels have been closed. 
The *timeout* argument sets the timeout - parameter for the appropriate :func:`~select.select` or :func:`~select.poll` - call, measured in seconds; the default is 30 seconds. The *use_poll* - parameter, if true, indicates that :func:`~select.poll` should be used in - preference to :func:`~select.select` (the default is ``False``). - - The *map* parameter is a dictionary whose items are the channels to watch. - As channels are closed they are deleted from their map. If *map* is - omitted, a global map is used. Channels (instances of - :class:`asyncore.dispatcher`, :class:`asynchat.async_chat` and subclasses - thereof) can freely be mixed in the map. - - -.. class:: dispatcher() - - The :class:`dispatcher` class is a thin wrapper around a low-level socket - object. To make it more useful, it has a few methods for event-handling - which are called from the asynchronous loop. Otherwise, it can be treated - as a normal non-blocking socket object. - - The firing of low-level events at certain times or in certain connection - states tells the asynchronous loop that certain higher-level events have - taken place. For example, if we have asked for a socket to connect to - another host, we know that the connection has been made when the socket - becomes writable for the first time (at this point you know that you may - write to it with the expectation of success). The implied higher-level - events are: - - +----------------------+----------------------------------------+ - | Event | Description | - +======================+========================================+ - | ``handle_connect()`` | Implied by the first read or write | - | | event | - +----------------------+----------------------------------------+ - | ``handle_close()`` | Implied by a read event with no data | - | | available | - +----------------------+----------------------------------------+ - | ``handle_accepted()``| Implied by a read event on a listening | - | | socket | - +----------------------+----------------------------------------+ - - During asynchronous processing, each mapped channel's :meth:`readable` and - :meth:`writable` methods are used to determine whether the channel's socket - should be added to the list of channels :c:func:`select`\ ed or - :c:func:`poll`\ ed for read and write events. - - Thus, the set of channel events is larger than the basic socket events. The - full set of methods that can be overridden in your subclass follows: - - - .. method:: handle_read() - - Called when the asynchronous loop detects that a :meth:`read` call on the - channel's socket will succeed. - - - .. method:: handle_write() - - Called when the asynchronous loop detects that a writable socket can be - written. Often this method will implement the necessary buffering for - performance. For example:: - - def handle_write(self): - sent = self.send(self.buffer) - self.buffer = self.buffer[sent:] - - - .. method:: handle_expt() - - Called when there is out of band (OOB) data for a socket connection. This - will almost never happen, as OOB is tenuously supported and rarely used. - - - .. method:: handle_connect() - - Called when the active opener's socket actually makes a connection. Might - send a "welcome" banner, or initiate a protocol negotiation with the - remote endpoint, for example. - - - .. method:: handle_close() - - Called when the socket is closed. - - - .. method:: handle_error() - - Called when an exception is raised and not otherwise handled. The default - version prints a condensed traceback. - - - .. 
method:: handle_accept() - - Called on listening channels (passive openers) when a connection can be - established with a new remote endpoint that has issued a :meth:`connect` - call for the local endpoint. Deprecated in version 3.2; use - :meth:`handle_accepted` instead. - - .. deprecated:: 3.2 - - - .. method:: handle_accepted(sock, addr) - - Called on listening channels (passive openers) when a connection has been - established with a new remote endpoint that has issued a :meth:`connect` - call for the local endpoint. *sock* is a *new* socket object usable to - send and receive data on the connection, and *addr* is the address - bound to the socket on the other end of the connection. - - .. versionadded:: 3.2 - - - .. method:: readable() - - Called each time around the asynchronous loop to determine whether a - channel's socket should be added to the list on which read events can - occur. The default method simply returns ``True``, indicating that by - default, all channels will be interested in read events. - - - .. method:: writable() - - Called each time around the asynchronous loop to determine whether a - channel's socket should be added to the list on which write events can - occur. The default method simply returns ``True``, indicating that by - default, all channels will be interested in write events. - - - In addition, each channel delegates or extends many of the socket methods. - Most of these are nearly identical to their socket partners. - - - .. method:: create_socket(family=socket.AF_INET, type=socket.SOCK_STREAM) - - This is identical to the creation of a normal socket, and will use the - same options for creation. Refer to the :mod:`socket` documentation for - information on creating sockets. - - .. versionchanged:: 3.3 - *family* and *type* arguments can be omitted. - - - .. method:: connect(address) - - As with the normal socket object, *address* is a tuple with the first - element the host to connect to, and the second the port number. - - - .. method:: send(data) - - Send *data* to the remote end-point of the socket. - - - .. method:: recv(buffer_size) - - Read at most *buffer_size* bytes from the socket's remote end-point. An - empty bytes object implies that the channel has been closed from the - other end. - - Note that :meth:`recv` may raise :exc:`BlockingIOError` , even though - :func:`select.select` or :func:`select.poll` has reported the socket - ready for reading. - - - .. method:: listen(backlog) - - Listen for connections made to the socket. The *backlog* argument - specifies the maximum number of queued connections and should be at least - 1; the maximum value is system-dependent (usually 5). - - - .. method:: bind(address) - - Bind the socket to *address*. The socket must not already be bound. (The - format of *address* depends on the address family --- refer to the - :mod:`socket` documentation for more information.) To mark - the socket as re-usable (setting the :const:`SO_REUSEADDR` option), call - the :class:`dispatcher` object's :meth:`set_reuse_addr` method. - - - .. method:: accept() - - Accept a connection. The socket must be bound to an address and listening - for connections. The return value can be either ``None`` or a pair - ``(conn, address)`` where *conn* is a *new* socket object usable to send - and receive data on the connection, and *address* is the address bound to - the socket on the other end of the connection. 
- When ``None`` is returned it means the connection didn't take place, in - which case the server should just ignore this event and keep listening - for further incoming connections. - - - .. method:: close() - - Close the socket. All future operations on the socket object will fail. - The remote end-point will receive no more data (after queued data is - flushed). Sockets are automatically closed when they are - garbage-collected. - - -.. class:: dispatcher_with_send() - - A :class:`dispatcher` subclass which adds simple buffered output capability, - useful for simple clients. For more sophisticated usage use - :class:`asynchat.async_chat`. - -.. class:: file_dispatcher() - - A file_dispatcher takes a file descriptor or :term:`file object` along - with an optional map argument and wraps it for use with the :c:func:`poll` - or :c:func:`loop` functions. If provided a file object or anything with a - :c:func:`fileno` method, that method will be called and passed to the - :class:`file_wrapper` constructor. - - .. availability:: Unix. - -.. class:: file_wrapper() - - A file_wrapper takes an integer file descriptor and calls :func:`os.dup` to - duplicate the handle so that the original handle may be closed independently - of the file_wrapper. This class implements sufficient methods to emulate a - socket for use by the :class:`file_dispatcher` class. - - .. availability:: Unix. - - -.. _asyncore-example-1: - -asyncore Example basic HTTP client ----------------------------------- - -Here is a very basic HTTP client that uses the :class:`dispatcher` class to -implement its socket handling:: - - import asyncore - - class HTTPClient(asyncore.dispatcher): - - def __init__(self, host, path): - asyncore.dispatcher.__init__(self) - self.create_socket() - self.connect( (host, 80) ) - self.buffer = bytes('GET %s HTTP/1.0\r\nHost: %s\r\n\r\n' % - (path, host), 'ascii') - - def handle_connect(self): - pass - - def handle_close(self): - self.close() - - def handle_read(self): - print(self.recv(8192)) - - def writable(self): - return (len(self.buffer) > 0) - - def handle_write(self): - sent = self.send(self.buffer) - self.buffer = self.buffer[sent:] - - - client = HTTPClient('www.python.org', '/') - asyncore.loop() - -.. _asyncore-example-2: - -asyncore Example basic echo server ----------------------------------- - -Here is a basic echo server that uses the :class:`dispatcher` class to accept -connections and dispatches the incoming connections to a handler:: - - import asyncore - - class EchoHandler(asyncore.dispatcher_with_send): - - def handle_read(self): - data = self.recv(8192) - if data: - self.send(data) - - class EchoServer(asyncore.dispatcher): - - def __init__(self, host, port): - asyncore.dispatcher.__init__(self) - self.create_socket() - self.set_reuse_addr() - self.bind((host, port)) - self.listen(5) - - def handle_accepted(self, sock, addr): - print('Incoming connection from %s' % repr(addr)) - handler = EchoHandler(sock) - - server = EchoServer('localhost', 8080) - asyncore.loop() diff --git a/Doc/library/base64.rst b/Doc/library/base64.rst index a02ba739146aaf..4ca3768f827c6b 100644 --- a/Doc/library/base64.rst +++ b/Doc/library/base64.rst @@ -53,11 +53,13 @@ The modern interface provides: Encode the :term:`bytes-like object` *s* using Base64 and return the encoded :class:`bytes`. - Optional *altchars* must be a :term:`bytes-like object` of at least - length 2 (additional characters are ignored) which specifies an alternative - alphabet for the ``+`` and ``/`` characters. 
This allows an application to e.g. - generate URL or filesystem safe Base64 strings. The default is ``None``, for - which the standard Base64 alphabet is used. + Optional *altchars* must be a :term:`bytes-like object` of length 2 which + specifies an alternative alphabet for the ``+`` and ``/`` characters. + This allows an application to e.g. generate URL or filesystem safe Base64 + strings. The default is ``None``, for which the standard Base64 alphabet is used. + + May assert or raise a :exc:`ValueError` if the length of *altchars* is not 2. Raises a + :exc:`TypeError` if *altchars* is not a :term:`bytes-like object`. .. function:: b64decode(s, altchars=None, validate=False) @@ -65,9 +67,9 @@ The modern interface provides: Decode the Base64 encoded :term:`bytes-like object` or ASCII string *s* and return the decoded :class:`bytes`. - Optional *altchars* must be a :term:`bytes-like object` or ASCII string of - at least length 2 (additional characters are ignored) which specifies the - alternative alphabet used instead of the ``+`` and ``/`` characters. + Optional *altchars* must be a :term:`bytes-like object` or ASCII string + of length 2 which specifies the alternative alphabet used instead of the + ``+`` and ``/`` characters. A :exc:`binascii.Error` exception is raised if *s* is incorrectly padded. @@ -80,6 +82,7 @@ The modern interface provides: For more information about the strict base64 check, see :func:`binascii.a2b_base64` + May assert or raise a :exc:`ValueError` if the length of *altchars* is not 2. .. function:: standard_b64encode(s) diff --git a/Doc/library/bz2.rst b/Doc/library/bz2.rst index ae5a1598f84b44..32df99869eb530 100644 --- a/Doc/library/bz2.rst +++ b/Doc/library/bz2.rst @@ -320,9 +320,11 @@ Writing and reading a bzip2-compressed file in binary mode: >>> with bz2.open("myfile.bz2", "wb") as f: ... # Write compressed data to file ... unused = f.write(data) + ... >>> with bz2.open("myfile.bz2", "rb") as f: ... # Decompress data from file ... content = f.read() + ... >>> content == data # Check equality to original object after round-trip True diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst index 53b4b69f84b7bf..2cffc2300a2298 100644 --- a/Doc/library/collections.rst +++ b/Doc/library/collections.rst @@ -229,6 +229,7 @@ For example:: >>> cnt = Counter() >>> for word in ['red', 'blue', 'red', 'green', 'blue', 'blue']: ... cnt[word] += 1 + ... >>> cnt Counter({'blue': 3, 'red': 2, 'green': 1}) @@ -818,6 +819,7 @@ zero): >>> def constant_factory(value): ... return lambda: value + ... >>> d = defaultdict(constant_factory('<missing>')) >>> d.update(name='John', action='ran') >>> '%(name)s %(action)s to %(object)s' % d diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst index 0351ec970be0b8..fd5df875ed74d5 100644 --- a/Doc/library/ctypes.rst +++ b/Doc/library/ctypes.rst @@ -6,6 +6,8 @@ .. moduleauthor:: Thomas Heller <theller@python.net> +**Source code:** :source:`Lib/ctypes` + -------------- :mod:`ctypes` is a foreign function library for Python.
It provides C compatible @@ -359,7 +361,7 @@ from within *IDLE* or *PythonWin*:: >>> printf(b"%f bottles of beer\n", 42.5) Traceback (most recent call last): File "<stdin>", line 1, in <module> - ArgumentError: argument 2: exceptions.TypeError: Don't know how to convert parameter 2 + ArgumentError: argument 2: TypeError: Don't know how to convert parameter 2 >>> As has been mentioned before, all Python types except integers, strings, and @@ -371,6 +373,26 @@ that they can be converted to the required C data type:: 31 >>> +.. _ctypes-calling-variadic-functions: + +Calling variadic functions +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +On most platforms calling variadic functions through ctypes is exactly the same +as calling functions with a fixed number of parameters. On some platforms, and in +particular ARM64 for Apple Platforms, the calling convention for variadic functions +is different from that for regular functions. + +On those platforms it is required to specify the *argtypes* attribute for the +regular, non-variadic, function arguments: + +.. code-block:: python3 + + libc.printf.argtypes = [ctypes.c_char_p] + +Because specifying the attribute does not inhibit portability, it is advised to always +specify ``argtypes`` for all variadic functions. + .. _ctypes-calling-functions-with-own-custom-data-types: @@ -422,7 +444,7 @@ prototype for a C function), and tries to convert the arguments to valid types:: >>> printf(b"%d %d %d", 1, 2, 3) Traceback (most recent call last): File "<stdin>", line 1, in <module> - ArgumentError: argument 2: exceptions.TypeError: wrong type + ArgumentError: argument 2: TypeError: wrong type >>> printf(b"%s %d %f\n", b"X", 2, 3) X 2 3.000000 13 @@ -487,7 +509,7 @@ single character Python bytes object into a C char:: >>> strchr(b"abcdef", b"def") Traceback (most recent call last): File "<stdin>", line 1, in <module> - ArgumentError: argument 2: exceptions.TypeError: one character string expected + ArgumentError: argument 2: TypeError: one character string expected >>> print(strchr(b"abcdef", b"x")) None >>> strchr(b"abcdef", b"d") @@ -1418,8 +1440,8 @@ copy of the windows error code. The *winmode* parameter is used on Windows to specify how the library is loaded (since *mode* is ignored). It takes any value that is valid for the Win32 API -``LoadLibraryEx`` flags parameter. When omitted, the default is to use the flags -that result in the most secure DLL load to avoiding issues such as DLL +``LoadLibraryEx`` flags parameter. When omitted, the default is to use the +flags that result in the most secure DLL load, which avoids issues such as DLL hijacking. Passing the full path to the DLL is the safest way to ensure the correct library and dependencies are loaded. diff --git a/Doc/library/curses.ascii.rst b/Doc/library/curses.ascii.rst index a69dbb2ac06572..e1d1171927c9e2 100644 --- a/Doc/library/curses.ascii.rst +++ b/Doc/library/curses.ascii.rst @@ -7,6 +7,8 @@ .. moduleauthor:: Eric S. Raymond <esr@thyrsus.com> .. sectionauthor:: Eric S. Raymond <esr@thyrsus.com> +**Source code:** :source:`Lib/curses/ascii.py` + -------------- The :mod:`curses.ascii` module supplies name constants for ASCII characters and diff --git a/Doc/library/curses.rst b/Doc/library/curses.rst index bf4e69a0170a62..f9f94b22f69e3d 100644 --- a/Doc/library/curses.rst +++ b/Doc/library/curses.rst @@ -9,6 +9,8 @@ .. sectionauthor:: Moshe Zadka <moshez@zadka.site.co.il> ..
sectionauthor:: Eric Raymond <esr@thyrsus.com> +**Source code:** :source:`Lib/curses` + -------------- The :mod:`curses` module provides an interface to the curses library, the diff --git a/Doc/library/dataclasses.rst b/Doc/library/dataclasses.rst index 847299649d1efd..32c524a7348719 100644 --- a/Doc/library/dataclasses.rst +++ b/Doc/library/dataclasses.rst @@ -79,7 +79,8 @@ Module contents class C: ... - @dataclass(init=True, repr=True, eq=True, order=False, unsafe_hash=False, frozen=False, match_args=True, kw_only=False, slots=False, weakref_slot=False) + @dataclass(init=True, repr=True, eq=True, order=False, unsafe_hash=False, frozen=False, + match_args=True, kw_only=False, slots=False, weakref_slot=False) class C: ... diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst index f7e2bb3f3c6de3..8bfed19d3fd2c6 100644 --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -765,6 +765,7 @@ Example of counting days to an event:: >>> my_birthday = date(today.year, 6, 24) >>> if my_birthday < today: ... my_birthday = my_birthday.replace(year=today.year + 1) + ... >>> my_birthday datetime.date(2008, 6, 24) >>> time_to_birthday = abs(my_birthday - today) @@ -2601,7 +2602,7 @@ Notes: (9) When used with the :meth:`strptime` method, the leading zero is optional - for formats ``%d``, ``%m``, ``%H``, ``%I``, ``%M``, ``%S``, ``%J``, ``%U``, + for formats ``%d``, ``%m``, ``%H``, ``%I``, ``%M``, ``%S``, ``%j``, ``%U``, ``%W``, and ``%V``. Format ``%y`` does require a leading zero. .. rubric:: Footnotes diff --git a/Doc/library/decimal.rst b/Doc/library/decimal.rst index 260108136df7f1..fec9b86864c578 100644 --- a/Doc/library/decimal.rst +++ b/Doc/library/decimal.rst @@ -2057,6 +2057,7 @@ to handle the :meth:`quantize` step: >>> def mul(x, y, fp=TWOPLACES): ... return (x * y).quantize(fp) + ... >>> def div(x, y, fp=TWOPLACES): ... return (x / y).quantize(fp) diff --git a/Doc/library/dis.rst b/Doc/library/dis.rst index 1eaa9f32442deb..30bbf95be63417 100644 --- a/Doc/library/dis.rst +++ b/Doc/library/dis.rst @@ -607,6 +607,15 @@ the original TOS1. .. versionadded:: 3.12 +.. opcode:: STOPITERATION_ERROR + + Handles a StopIteration raised in a generator or coroutine. + If TOS is an instance of :exc:`StopIteration`, or :exc:`StopAsyncIteration` + replace it with a :exc:`RuntimeError`. + + .. versionadded:: 3.12 + + .. opcode:: BEFORE_ASYNC_WITH Resolves ``__aenter__`` and ``__aexit__`` from the object on top of the diff --git a/Doc/library/doctest.rst b/Doc/library/doctest.rst index 75c6ee289a91e9..d6e4dca0860671 100644 --- a/Doc/library/doctest.rst +++ b/Doc/library/doctest.rst @@ -351,6 +351,7 @@ The fine print: >>> def f(x): ... r'''Backslashes in a raw docstring: m\n''' + ... >>> print(f.__doc__) Backslashes in a raw docstring: m\n @@ -360,6 +361,7 @@ The fine print: >>> def f(x): ... '''Backslashes in a raw docstring: m\\n''' + ... >>> print(f.__doc__) Backslashes in a raw docstring: m\n @@ -696,10 +698,10 @@ special Python comments following an example's source code: .. productionlist:: doctest directive: "#" "doctest:" `directive_options` - directive_options: `directive_option` ("," `directive_option`)\* + directive_options: `directive_option` ("," `directive_option`)* directive_option: `on_or_off` `directive_option_name` - on_or_off: "+" \| "-" - directive_option_name: "DONT_ACCEPT_BLANKLINE" \| "NORMALIZE_WHITESPACE" \| ... + on_or_off: "+" | "-" + directive_option_name: "DONT_ACCEPT_BLANKLINE" | "NORMALIZE_WHITESPACE" | ... 
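Referring back to the :meth:`~datetime.datetime.strptime` leading-zero note above, a minimal sketch of the behaviour; the dates are purely illustrative:

.. code-block:: python

   from datetime import datetime

   # Leading zeros are optional for %m, %d, %j and friends when parsing.
   assert datetime.strptime("2023-1-7", "%Y-%m-%d") == datetime(2023, 1, 7)
   assert datetime.strptime("5", "%j") == datetime(1900, 1, 5)

   # %y is the exception: it still requires the leading zero.
   try:
       datetime.strptime("7", "%y")
   except ValueError:
       pass  # raised because "7" is missing the leading zero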
Whitespace is not allowed between the ``+`` or ``-`` and the directive option name. The directive option name can be any of the option flag names explained @@ -1055,7 +1057,7 @@ from text files and modules with doctests: from a text file using :func:`DocFileSuite`. -.. function:: DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None, setUp=None, tearDown=None, checker=None) +.. function:: DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None, setUp=None, tearDown=None, optionflags=0, checker=None) Convert doctest tests for a module to a :class:`unittest.TestSuite`. diff --git a/Doc/library/email.policy.rst b/Doc/library/email.policy.rst index bf53b9520fc723..2439dee676c9b0 100644 --- a/Doc/library/email.policy.rst +++ b/Doc/library/email.policy.rst @@ -97,6 +97,7 @@ file on disk and pass it to the system ``sendmail`` program on a Unix system: >>> from subprocess import Popen, PIPE >>> with open('mymsg.txt', 'rb') as f: ... msg = message_from_binary_file(f, policy=policy.default) + ... >>> p = Popen(['sendmail', msg['To'].addresses[0]], stdin=PIPE) >>> g = BytesGenerator(p.stdin, policy=msg.policy.clone(linesep='\r\n')) >>> g.flatten(msg) diff --git a/Doc/library/ensurepip.rst b/Doc/library/ensurepip.rst index 34f45e20bae9d9..d7f89cf96368b5 100644 --- a/Doc/library/ensurepip.rst +++ b/Doc/library/ensurepip.rst @@ -7,6 +7,8 @@ .. versionadded:: 3.4 +**Source code:** :source:`Lib/ensurepip` + -------------- The :mod:`ensurepip` package provides support for bootstrapping the ``pip`` diff --git a/Doc/library/enum.rst b/Doc/library/enum.rst index dcced28b8508e8..25a6e1f0b61677 100644 --- a/Doc/library/enum.rst +++ b/Doc/library/enum.rst @@ -27,7 +27,8 @@ An enumeration: * is a set of symbolic names (members) bound to unique values -* can be iterated over to return its members in definition order +* can be iterated over to return its canonical (i.e. non-alias) members in + definition order * uses *call* syntax to return members by value * uses *index* syntax to return members by name @@ -193,7 +194,7 @@ Data Types .. method:: EnumType.__getitem__(cls, name) - Returns the Enum member in *cls* matching *name*, or raises an :exc:`KeyError`:: + Returns the Enum member in *cls* matching *name*, or raises a :exc:`KeyError`:: >>> Color['BLUE'] <Color.BLUE: 3> @@ -240,10 +241,10 @@ Data Types .. note:: Enum member values - Member values can be anything: :class:`int`, :class:`str`, etc.. If + Member values can be anything: :class:`int`, :class:`str`, etc. If the exact value is unimportant you may use :class:`auto` instances and an - appropriate value will be chosen for you. Care must be taken if you mix - :class:`auto` with other values. + appropriate value will be chosen for you. See :class:`auto` for the + details. .. attribute:: Enum._ignore_ @@ -254,7 +255,7 @@ Data Types names will also be removed from the completed enumeration. See :ref:`TimePeriod <enum-time-period>` for an example. - .. method:: Enum.__call__(cls, value, names=None, \*, module=None, qualname=None, type=None, start=1, boundary=None) + .. method:: Enum.__call__(cls, value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None) This method is called in two different ways: @@ -271,8 +272,8 @@ Data Types :module: The name of the module the new Enum is created in. :qualname: The actual location in the module where this Enum can be found. :type: A mix-in type for the new Enum. 
- :start: The first integer value for the Enum (used by :class:`auto`) - :boundary: How to handle out-of-range values from bit operations (:class:`Flag` only) + :start: The first integer value for the Enum (used by :class:`auto`). + :boundary: How to handle out-of-range values from bit operations (:class:`Flag` only). .. method:: Enum.__dir__(self) @@ -291,6 +292,7 @@ Data Types ... @classmethod ... def today(cls): ... print('today is %s' % cls(date.today().isoweekday()).name) + ... >>> dir(Weekday.SATURDAY) ['__class__', '__doc__', '__eq__', '__hash__', '__module__', 'name', 'today', 'value'] @@ -308,13 +310,14 @@ Data Types >>> class PowersOfThree(Enum): ... @staticmethod ... def _generate_next_value_(name, start, count, last_values): - ... return (count + 1) * 3 + ... return 3 ** (count + 1) ... FIRST = auto() ... SECOND = auto() + ... >>> PowersOfThree.SECOND.value - 6 + 9 - .. method:: Enum.__init_subclass__(cls, \**kwds) + .. method:: Enum.__init_subclass__(cls, **kwds) A *classmethod* that is used to further configure subsequent subclasses. By default, does nothing. @@ -335,6 +338,7 @@ Data Types ... if member.value == value: ... return member ... return None + ... >>> Build.DEBUG.value 'debug' >>> Build('deBUG') @@ -352,6 +356,7 @@ Data Types ... def __repr__(self): ... cls_name = self.__class__.__name__ ... return f'{cls_name}.{self.name}' + ... >>> OtherStyle.ALTERNATE, str(OtherStyle.ALTERNATE), f"{OtherStyle.ALTERNATE}" (OtherStyle.ALTERNATE, 'OtherStyle.ALTERNATE', 'OtherStyle.ALTERNATE') @@ -366,13 +371,14 @@ Data Types ... SOMETHING_ELSE = auto() ... def __str__(self): ... return f'{self.name}' + ... >>> OtherStyle.ALTERNATE, str(OtherStyle.ALTERNATE), f"{OtherStyle.ALTERNATE}" (<OtherStyle.ALTERNATE: 1>, 'ALTERNATE', 'ALTERNATE') .. method:: Enum.__format__(self) Returns the string used for *format()* and *f-string* calls. By default, - returns :meth:`__str__` returns, but can be overridden:: + returns :meth:`__str__` return value, but can be overridden:: >>> class OtherStyle(Enum): ... ALTERNATE = auto() @@ -380,6 +386,7 @@ Data Types ... SOMETHING_ELSE = auto() ... def __format__(self, spec): ... return f'{self.name}' + ... >>> OtherStyle.ALTERNATE, str(OtherStyle.ALTERNATE), f"{OtherStyle.ALTERNATE}" (<OtherStyle.ALTERNATE: 1>, 'OtherStyle.ALTERNATE', 'ALTERNATE') @@ -388,6 +395,8 @@ Data Types Using :class:`auto` with :class:`Enum` results in integers of increasing value, starting with ``1``. + .. versionchanged:: 3.12 Added :ref:`enum-dataclass-support` + .. class:: IntEnum @@ -400,6 +409,7 @@ Data Types ... ONE = 1 ... TWO = 2 ... THREE = 3 + ... >>> Numbers.THREE <Numbers.THREE: 3> >>> Numbers.ONE + Numbers.TWO @@ -425,19 +435,23 @@ Data Types in most of the same places that a string can be used. The result of any string operation performed on or with a *StrEnum* member is not part of the enumeration. - .. note:: There are places in the stdlib that check for an exact :class:`str` - instead of a :class:`str` subclass (i.e. ``type(unknown) == str`` - instead of ``isinstance(unknown, str)``), and in those locations you - will need to use ``str(StrEnum.member)``. + .. note:: + + There are places in the stdlib that check for an exact :class:`str` + instead of a :class:`str` subclass (i.e. ``type(unknown) == str`` + instead of ``isinstance(unknown, str)``), and in those locations you + will need to use ``str(StrEnum.member)``. .. note:: Using :class:`auto` with :class:`StrEnum` results in the lower-cased member name as the value. - .. 
note:: :meth:`__str__` is :func:`str.__str__` to better support the - *replacement of existing constants* use-case. :meth:`__format__` is likewise - :func:`str.__format__` for that same reason. + .. note:: + + :meth:`~object.__str__` is :meth:`!str.__str__` to better support the + *replacement of existing constants* use-case. :meth:`~object.__format__` is likewise + :meth:`!str.__format__` for that same reason. .. versionadded:: 3.11 @@ -456,6 +470,7 @@ Data Types ... RED = auto() ... GREEN = auto() ... BLUE = auto() + ... >>> purple = Color.RED | Color.BLUE >>> white = Color.RED | Color.GREEN | Color.BLUE >>> Color.GREEN in purple @@ -469,13 +484,17 @@ Data Types .. method:: __iter__(self): - Returns all contained members:: + Returns all contained non-alias members:: >>> list(Color.RED) [<Color.RED: 1>] >>> list(purple) [<Color.RED: 1>, <Color.BLUE: 4>] + .. versionchanged:: 3.11 + + Aliases are no longer returned during iteration. + .. method:: __len__(self): Returns number of members in flag:: @@ -543,11 +562,11 @@ Data Types Using :class:`auto` with :class:`Flag` results in integers that are powers of two, starting with ``1``. - .. versionchanged:: 3.11 The *repr()* of zero-valued flags has changed. It + .. versionchanged:: 3.11 The *repr()* of zero-valued flags has changed. It is now:: - >>> Color(0) # doctest: +SKIP - <Color: 0> + >>> Color(0) # doctest: +SKIP + <Color: 0> .. class:: IntFlag @@ -559,6 +578,7 @@ Data Types ... RED = auto() ... GREEN = auto() ... BLUE = auto() + ... >>> Color.RED & 2 <Color: 0> >>> Color.RED | 2 @@ -585,9 +605,15 @@ Data Types Using :class:`auto` with :class:`IntFlag` results in integers that are powers of two, starting with ``1``. - .. versionchanged:: 3.11 :meth:`__str__` is now :func:`int.__str__` to - better support the *replacement of existing constants* use-case. - :meth:`__format__` was already :func:`int.__format__` for that same reason. + .. versionchanged:: 3.11 + + :meth:`~object.__str__` is now :meth:`!int.__str__` to better support the + *replacement of existing constants* use-case. :meth:`~object.__format__` was + already :meth:`!int.__format__` for that same reason. + + Inversion of an :class:`!IntFlag` now returns a positive value that is the + union of all flags not in the given flag, rather than a negative value. + This matches the existing :class:`Flag` behavior. .. class:: ReprEnum @@ -597,7 +623,7 @@ Data Types * :meth:`!int.__str__` for :class:`IntEnum` and :class:`IntFlag` * :meth:`!str.__str__` for :class:`StrEnum` - Inherit from :class:`!ReprEnum` to keep the :class:`str() <str> / :func:`format` + Inherit from :class:`!ReprEnum` to keep the :class:`str() <str>` / :func:`format` of the mixed-in data type instead of using the :class:`Enum`-default :meth:`str() <Enum.__str__>`. @@ -643,7 +669,7 @@ Data Types .. attribute:: NAMED_FLAGS Ensure that any flag groups/masks contain only named flags -- useful when - values are specified instead of being generated by :func:`auto` + values are specified instead of being generated by :func:`auto`:: >>> from enum import Flag, verify, NAMED_FLAGS >>> @verify(NAMED_FLAGS) @@ -678,6 +704,7 @@ Data Types ... RED = auto() ... GREEN = auto() ... BLUE = auto() + ... >>> StrictFlag(2**2 + 2**4) Traceback (most recent call last): ... @@ -695,6 +722,7 @@ Data Types ... RED = auto() ... GREEN = auto() ... BLUE = auto() + ... >>> ConformFlag(2**2 + 2**4) <ConformFlag.BLUE: 4> @@ -708,6 +736,7 @@ Data Types ... RED = auto() ... GREEN = auto() ... BLUE = auto() + ... 
>>> EjectFlag(2**2 + 2**4) 20 @@ -721,6 +750,7 @@ Data Types ... RED = auto() ... GREEN = auto() ... BLUE = auto() + ... >>> KeepFlag(2**2 + 2**4) <KeepFlag.BLUE|16: 20> @@ -778,7 +808,21 @@ Utilities and Decorators For *Enum* and *IntEnum* that appropriate value will be the last value plus one; for *Flag* and *IntFlag* it will be the first power-of-two greater than the last value; for *StrEnum* it will be the lower-cased version of the - member's name. + member's name. Care must be taken if mixing *auto()* with manually specified + values. + + *auto* instances are only resolved when at the top level of an assignment: + + * ``FIRST = auto()`` will work (auto() is replaced with ``1``); + * ``SECOND = auto(), -2`` will work (auto is replaced with ``2``, so ``2, -2`` is + used to create the ``SECOND`` enum member; + * ``THREE = [auto(), -3]`` will *not* work (``<auto instance>, -3`` is used to + create the ``THREE`` enum member) + + .. versionchanged:: 3.11.1 + + In prior versions, ``auto()`` had to be the only thing + on the assignment line to work properly. ``_generate_next_value_`` can be overridden to customize the values used by *auto*. @@ -861,23 +905,23 @@ Notes :class:`IntEnum`, :class:`StrEnum`, and :class:`IntFlag` - These three enum types are designed to be drop-in replacements for existing - integer- and string-based values; as such, they have extra limitations: + These three enum types are designed to be drop-in replacements for existing + integer- and string-based values; as such, they have extra limitations: - - ``__str__`` uses the value and not the name of the enum member + - ``__str__`` uses the value and not the name of the enum member - - ``__format__``, because it uses ``__str__``, will also use the value of - the enum member instead of its name + - ``__format__``, because it uses ``__str__``, will also use the value of + the enum member instead of its name - If you do not need/want those limitations, you can either create your own - base class by mixing in the ``int`` or ``str`` type yourself:: + If you do not need/want those limitations, you can either create your own + base class by mixing in the ``int`` or ``str`` type yourself:: - >>> from enum import Enum - >>> class MyIntEnum(int, Enum): - ... pass + >>> from enum import Enum + >>> class MyIntEnum(int, Enum): + ... pass or you can reassign the appropriate :meth:`str`, etc., in your enum:: - >>> from enum import IntEnum - >>> class MyIntEnum(IntEnum): - ... __str__ = IntEnum.__str__ + >>> from enum import IntEnum + >>> class MyIntEnum(IntEnum): + ... __str__ = IntEnum.__str__ diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst index fc856277d67b2e..1217b817b4e843 100644 --- a/Doc/library/exceptions.rst +++ b/Doc/library/exceptions.rst @@ -934,21 +934,42 @@ their subgroups based on the types of the contained exceptions. .. method:: derive(excs) - Returns an exception group with the same :attr:`message`, - :attr:`__traceback__`, :attr:`__cause__`, :attr:`__context__` - and :attr:`__notes__` but which wraps the exceptions in ``excs``. + Returns an exception group with the same :attr:`message`, but which + wraps the exceptions in ``excs``. This method is used by :meth:`subgroup` and :meth:`split`. A subclass needs to override it in order to make :meth:`subgroup` and :meth:`split` return instances of the subclass rather - than :exc:`ExceptionGroup`. :: + than :exc:`ExceptionGroup`. 
+ + :meth:`subgroup` and :meth:`split` copy the :attr:`__traceback__`, + :attr:`__cause__`, :attr:`__context__` and :attr:`__notes__` fields from + the original exception group to the one returned by :meth:`derive`, so + these fields do not need to be updated by :meth:`derive`. :: >>> class MyGroup(ExceptionGroup): ... def derive(self, exc): ... return MyGroup(self.message, exc) ... - >>> MyGroup("eg", [ValueError(1), TypeError(2)]).split(TypeError) - (MyGroup('eg', [TypeError(2)]), MyGroup('eg', [ValueError(1)])) + >>> e = MyGroup("eg", [ValueError(1), TypeError(2)]) + >>> e.add_note("a note") + >>> e.__context__ = Exception("context") + >>> e.__cause__ = Exception("cause") + >>> try: + ... raise e + ... except Exception as e: + ... exc = e + ... + >>> match, rest = exc.split(ValueError) + >>> exc, exc.__context__, exc.__cause__, exc.__notes__ + (MyGroup('eg', [ValueError(1), TypeError(2)]), Exception('context'), Exception('cause'), ['a note']) + >>> match, match.__context__, match.__cause__, match.__notes__ + (MyGroup('eg', [ValueError(1)]), Exception('context'), Exception('cause'), ['a note']) + >>> rest, rest.__context__, rest.__cause__, rest.__notes__ + (MyGroup('eg', [TypeError(2)]), Exception('context'), Exception('cause'), ['a note']) + >>> exc.__traceback__ is match.__traceback__ is rest.__traceback__ + True + Note that :exc:`BaseExceptionGroup` defines :meth:`__new__`, so subclasses that need a different constructor signature need to @@ -965,6 +986,10 @@ their subgroups based on the types of the contained exceptions. def derive(self, excs): return Errors(excs, self.exit_code) + Like :exc:`ExceptionGroup`, any subclass of :exc:`BaseExceptionGroup` which + is also a subclass of :exc:`Exception` can only wrap instances of + :exc:`Exception`. + .. versionadded:: 3.11 diff --git a/Doc/library/fnmatch.rst b/Doc/library/fnmatch.rst index 9163da57c7b999..46bf0fc2848058 100644 --- a/Doc/library/fnmatch.rst +++ b/Doc/library/fnmatch.rst @@ -48,7 +48,7 @@ patterns. Also note that :func:`functools.lru_cache` with the *maxsize* of 32768 is used to cache the compiled regex patterns in the following functions: :func:`fnmatch`, -:func:`fnmatchcase`, :func:`filter`. +:func:`fnmatchcase`, :func:`.filter`. .. function:: fnmatch(filename, pattern) diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst index 8108c98332e964..2110990d188973 100644 --- a/Doc/library/functions.rst +++ b/Doc/library/functions.rst @@ -54,14 +54,14 @@ are always available. They are listed here in alphabetical order. .. |func-bytearray| replace:: ``bytearray()`` .. |func-bytes| replace:: ``bytes()`` -.. function:: abs(x, /) +.. function:: abs(x) Return the absolute value of a number. The argument may be an integer, a floating point number, or an object implementing :meth:`__abs__`. If the argument is a complex number, its magnitude is returned. -.. function:: aiter(async_iterable, /) +.. function:: aiter(async_iterable) Return an :term:`asynchronous iterator` for an :term:`asynchronous iterable`. Equivalent to calling ``x.__aiter__()``. @@ -70,7 +70,7 @@ are always available. They are listed here in alphabetical order. .. versionadded:: 3.10 -.. function:: all(iterable, /) +.. function:: all(iterable) Return ``True`` if all elements of the *iterable* are true (or if the iterable is empty). Equivalent to:: @@ -82,8 +82,8 @@ are always available. They are listed here in alphabetical order. return True -.. awaitablefunction:: anext(async_iterator, /) - anext(async_iterator, default, /) +.. 
awaitablefunction:: anext(async_iterator) + anext(async_iterator, default) When awaited, return the next item from the given :term:`asynchronous iterator`, or *default* if given and the iterator is exhausted. @@ -98,7 +98,7 @@ are always available. They are listed here in alphabetical order. .. versionadded:: 3.10 -.. function:: any(iterable, /) +.. function:: any(iterable) Return ``True`` if any element of the *iterable* is true. If the iterable is empty, return ``False``. Equivalent to:: @@ -110,7 +110,7 @@ are always available. They are listed here in alphabetical order. return False -.. function:: ascii(object, /) +.. function:: ascii(object) As :func:`repr`, return a string containing a printable representation of an object, but escape the non-ASCII characters in the string returned by @@ -118,7 +118,7 @@ are always available. They are listed here in alphabetical order. similar to that returned by :func:`repr` in Python 2. -.. function:: bin(x, /) +.. function:: bin(x) Convert an integer number to a binary string prefixed with "0b". The result is a valid Python expression. If *x* is not a Python :class:`int` object, it @@ -140,7 +140,7 @@ are always available. They are listed here in alphabetical order. See also :func:`format` for more information. -.. class:: bool(x=False, /) +.. class:: bool(x=False) Return a Boolean value, i.e. one of ``True`` or ``False``. *x* is converted using the standard :ref:`truth testing procedure <truth>`. If *x* is false @@ -222,7 +222,7 @@ are always available. They are listed here in alphabetical order. See also :ref:`binaryseq`, :ref:`typebytes`, and :ref:`bytes-methods`. -.. function:: callable(object, /) +.. function:: callable(object) Return :const:`True` if the *object* argument appears callable, :const:`False` if not. If this returns ``True``, it is still possible that a @@ -235,7 +235,7 @@ are always available. They are listed here in alphabetical order. in Python 3.2. -.. function:: chr(i, /) +.. function:: chr(i) Return the string representing a character whose Unicode code point is the integer *i*. For example, ``chr(97)`` returns the string ``'a'``, while @@ -364,7 +364,7 @@ are always available. They are listed here in alphabetical order. .. class:: complex(real=0, imag=0) - complex(string, /) + complex(string) Return a complex number with the value *real* + *imag*\*1j or convert a string or number to a complex number. If the first parameter is a string, it will @@ -397,7 +397,7 @@ are always available. They are listed here in alphabetical order. :meth:`__float__` are not defined. -.. function:: delattr(object, name, /) +.. function:: delattr(object, name) This is a relative of :func:`setattr`. The arguments are an object and a string. The string must be the name of one of the object's attributes. The @@ -408,8 +408,8 @@ are always available. They are listed here in alphabetical order. .. _func-dict: .. class:: dict(**kwarg) - dict(mapping, /, **kwarg) - dict(iterable, /, **kwarg) + dict(mapping, **kwarg) + dict(iterable, **kwarg) :noindex: Create a new dictionary. The :class:`dict` object is the dictionary class. @@ -420,7 +420,7 @@ are always available. They are listed here in alphabetical order. .. function:: dir() - dir(object, /) + dir(object) Without arguments, return the list of names in the current local scope. With an argument, attempt to return a list of valid attributes for that object. @@ -462,6 +462,7 @@ are always available. They are listed here in alphabetical order. >>> class Shape: ... def __dir__(self): ... 
return ['area', 'perimeter', 'location'] + ... >>> s = Shape() >>> dir(s) ['area', 'location', 'perimeter'] @@ -476,7 +477,7 @@ are always available. They are listed here in alphabetical order. class. -.. function:: divmod(a, b, /) +.. function:: divmod(a, b) Take two (non-complex) numbers as arguments and return a pair of numbers consisting of their quotient and remainder when using integer division. With @@ -619,7 +620,7 @@ are always available. They are listed here in alphabetical order. Added the *closure* parameter. -.. function:: filter(function, iterable, /) +.. function:: filter(function, iterable) Construct an iterator from those elements of *iterable* for which *function* returns true. *iterable* may be either a sequence, a container which @@ -636,7 +637,7 @@ are always available. They are listed here in alphabetical order. elements of *iterable* for which *function* returns false. -.. class:: float(x=0.0, /) +.. class:: float(x=0.0) .. index:: single: NaN @@ -704,7 +705,7 @@ are always available. They are listed here in alphabetical order. single: __format__ single: string; format() (built-in function) -.. function:: format(value, format_spec="", /) +.. function:: format(value, format_spec="") Convert a *value* to a "formatted" representation, as controlled by *format_spec*. The interpretation of *format_spec* will depend on the type @@ -727,7 +728,7 @@ are always available. They are listed here in alphabetical order. .. _func-frozenset: -.. class:: frozenset(iterable=set(), /) +.. class:: frozenset(iterable=set()) :noindex: Return a new :class:`frozenset` object, optionally with elements taken from @@ -739,8 +740,8 @@ are always available. They are listed here in alphabetical order. module. -.. function:: getattr(object, name, /) - getattr(object, name, default, /) +.. function:: getattr(object, name) + getattr(object, name, default) Return the value of the named attribute of *object*. *name* must be a string. If the string is the name of one of the object's attributes, the result is the @@ -764,7 +765,7 @@ are always available. They are listed here in alphabetical order. regardless of where the function is called. -.. function:: hasattr(object, name, /) +.. function:: hasattr(object, name) The arguments are an object and a string. The result is ``True`` if the string is the name of one of the object's attributes, ``False`` if not. (This @@ -772,7 +773,7 @@ are always available. They are listed here in alphabetical order. raises an :exc:`AttributeError` or not.) -.. function:: hash(object, /) +.. function:: hash(object) Return the hash value of the object (if it has one). Hash values are integers. They are used to quickly compare dictionary keys during a @@ -807,7 +808,7 @@ are always available. They are listed here in alphabetical order. signatures for callables are now more comprehensive and consistent. -.. function:: hex(x, /) +.. function:: hex(x) Convert an integer number to a lowercase hexadecimal string prefixed with "0x". If *x* is not a Python :class:`int` object, it has to define an @@ -839,7 +840,7 @@ are always available. They are listed here in alphabetical order. :meth:`float.hex` method. -.. function:: id(object, /) +.. function:: id(object) Return the "identity" of an object. This is an integer which is guaranteed to be unique and constant for this object during its lifetime. @@ -852,7 +853,7 @@ are always available. They are listed here in alphabetical order. .. 
function:: input() - input(prompt, /) + input(prompt) If the *prompt* argument is present, it is written to standard output without a trailing newline. The function then reads a line from input, converts it @@ -878,8 +879,8 @@ are always available. They are listed here in alphabetical order. with the result after successfully reading input. -.. class:: int(x=0, /) - int(x, /, base=10) +.. class:: int(x=0) + int(x, base=10) Return an integer object constructed from a number or string *x*, or return ``0`` if no arguments are given. If *x* defines :meth:`__int__`, @@ -930,7 +931,7 @@ are always available. They are listed here in alphabetical order. See the :ref:`integer string conversion length limitation <int_max_str_digits>` documentation. -.. function:: isinstance(object, classinfo, /) +.. function:: isinstance(object, classinfo) Return ``True`` if the *object* argument is an instance of the *classinfo* argument, or of a (direct, indirect, or :term:`virtual <abstract base @@ -947,7 +948,7 @@ are always available. They are listed here in alphabetical order. *classinfo* can be a :ref:`types-union`. -.. function:: issubclass(class, classinfo, /) +.. function:: issubclass(class, classinfo) Return ``True`` if *class* is a subclass (direct, indirect, or :term:`virtual <abstract base class>`) of *classinfo*. A @@ -961,8 +962,8 @@ are always available. They are listed here in alphabetical order. *classinfo* can be a :ref:`types-union`. -.. function:: iter(object, /) - iter(object, sentinel, /) +.. function:: iter(object) + iter(object, sentinel) Return an :term:`iterator` object. The first argument is interpreted very differently depending on the presence of the second argument. Without a @@ -989,7 +990,7 @@ are always available. They are listed here in alphabetical order. process_block(block) -.. function:: len(s, /) +.. function:: len(s) Return the length (the number of items) of an object. The argument may be a sequence (such as a string, bytes, tuple, list, or range) or a collection @@ -1003,7 +1004,7 @@ are always available. They are listed here in alphabetical order. .. _func-list: .. class:: list() - list(iterable, /) + list(iterable) :noindex: Rather than being a function, :class:`list` is actually a mutable @@ -1021,7 +1022,7 @@ are always available. They are listed here in alphabetical order. The contents of this dictionary should not be modified; changes may not affect the values of local and free variables used by the interpreter. -.. function:: map(function, iterable, /, *iterables) +.. function:: map(function, iterable, *iterables) Return an iterator that applies *function* to every item of *iterable*, yielding the results. If additional *iterables* arguments are passed, @@ -1031,9 +1032,9 @@ are always available. They are listed here in alphabetical order. already arranged into argument tuples, see :func:`itertools.starmap`\. -.. function:: max(iterable, /, *, key=None) - max(iterable, /, *, default, key=None) - max(arg1, arg2, /, *args, key=None) +.. function:: max(iterable, *, key=None) + max(iterable, *, default, key=None) + max(arg1, arg2, *args, key=None) Return the largest item in an iterable or the largest of two or more arguments. @@ -1069,9 +1070,9 @@ are always available. They are listed here in alphabetical order. :ref:`typememoryview` for more information. -.. function:: min(iterable, /, *, key=None) - min(iterable, /, *, default, key=None) - min(arg1, arg2, /, *args, key=None) +.. 
function:: min(iterable, *, key=None) + min(iterable, *, default, key=None) + min(arg1, arg2, *args, key=None) Return the smallest item in an iterable or the smallest of two or more arguments. @@ -1099,8 +1100,8 @@ are always available. They are listed here in alphabetical order. The *key* can be ``None``. -.. function:: next(iterator, /) - next(iterator, default, /) +.. function:: next(iterator) + next(iterator, default) Retrieve the next item from the :term:`iterator` by calling its :meth:`~iterator.__next__` method. If *default* is given, it is returned @@ -1119,7 +1120,7 @@ are always available. They are listed here in alphabetical order. assign arbitrary attributes to an instance of the :class:`object` class. -.. function:: oct(x, /) +.. function:: oct(x) Convert an integer number to an octal string prefixed with "0o". The result is a valid Python expression. If *x* is not a Python :class:`int` object, it @@ -1371,7 +1372,7 @@ are always available. They are listed here in alphabetical order. .. versionchanged:: 3.11 The ``'U'`` mode has been removed. -.. function:: ord(c, /) +.. function:: ord(c) Given a string representing one Unicode character, return an integer representing the Unicode code point of that character. For example, @@ -1419,7 +1420,7 @@ are always available. They are listed here in alphabetical order. supported. -.. function:: print(*objects, sep=' ', end='\n', file=sys.stdout, flush=False) +.. function:: print(*objects, sep=' ', end='\n', file=None, flush=False) Print *objects* to the text stream *file*, separated by *sep* and followed by *end*. *sep*, *end*, *file*, and *flush*, if present, must be given as keyword @@ -1522,15 +1523,15 @@ are always available. They are listed here in alphabetical order. .. _func-range: -.. class:: range(stop, /) - range(start, stop, step=1, /) +.. class:: range(stop) + range(start, stop, step=1) :noindex: Rather than being a function, :class:`range` is actually an immutable sequence type, as documented in :ref:`typesseq-range` and :ref:`typesseq`. -.. function:: repr(object, /) +.. function:: repr(object) Return a string containing a printable representation of an object. For many types, this function makes an attempt to return a string that would yield an @@ -1543,7 +1544,7 @@ are always available. They are listed here in alphabetical order. :exc:`RuntimeError`. -.. function:: reversed(seq, /) +.. function:: reversed(seq) Return a reverse :term:`iterator`. *seq* must be an object which has a :meth:`__reversed__` method or supports the sequence protocol (the @@ -1580,7 +1581,7 @@ are always available. They are listed here in alphabetical order. .. _func-set: .. class:: set() - set(iterable, /) + set(iterable) :noindex: Return a new :class:`set` object, optionally with elements taken from @@ -1592,7 +1593,7 @@ are always available. They are listed here in alphabetical order. module. -.. function:: setattr(object, name, value, /) +.. function:: setattr(object, name, value) This is the counterpart of :func:`getattr`. The arguments are an object, a string, and an arbitrary value. The string may name an existing attribute or a @@ -1614,8 +1615,8 @@ are always available. They are listed here in alphabetical order. :func:`setattr`. -.. class:: slice(stop, /) - slice(start, stop, step=1, /) +.. class:: slice(stop) + slice(start, stop, step=1) Return a :term:`slice` object representing the set of indices specified by ``range(start, stop, step)``. The *start* and *step* arguments default to @@ -1733,7 +1734,7 @@ are always available. 
They are listed here in alphabetical order. The *start* parameter can be specified as a keyword argument. .. class:: super() - super(type, object_or_type=None, /) + super(type, object_or_type=None) Return a proxy object that delegates method calls to a parent or sibling class of *type*. This is useful for accessing inherited methods that have @@ -1804,15 +1805,15 @@ are always available. They are listed here in alphabetical order. .. _func-tuple: .. class:: tuple() - tuple(iterable, /) + tuple(iterable) :noindex: Rather than being a function, :class:`tuple` is actually an immutable sequence type, as documented in :ref:`typesseq-tuple` and :ref:`typesseq`. -.. class:: type(object, /) - type(name, bases, dict, /, **kwds) +.. class:: type(object) + type(name, bases, dict, **kwds) .. index:: object: type @@ -1853,7 +1854,7 @@ are always available. They are listed here in alphabetical order. longer use the one-argument form to get the type of an object. .. function:: vars() - vars(object, /) + vars(object) Return the :attr:`~object.__dict__` attribute for a module, class, instance, or any other object with a :attr:`~object.__dict__` attribute. diff --git a/Doc/library/hashlib.rst b/Doc/library/hashlib.rst index 8e47312fe77bf5..f8d10c0c295c7a 100644 --- a/Doc/library/hashlib.rst +++ b/Doc/library/hashlib.rst @@ -497,6 +497,7 @@ update the hash: >>> h = blake2b() >>> for item in items: ... h.update(item) + ... >>> h.hexdigest() '6ff843ba685842aa82031d3f53c48b66326df7639a63d128974c5c14f31a0f33343a8c65551134ed1ae0f2b0dd2bb495dc81039e3eeb0aa1bb0388bbeac29183' diff --git a/Doc/library/http.rst b/Doc/library/http.rst index 9d002dc33ccb8d..5e1912716e5319 100644 --- a/Doc/library/http.rst +++ b/Doc/library/http.rst @@ -171,16 +171,25 @@ Property Indicates that Details Usage:: >>> from http import HTTPMethod - >>> HTTMethod.GET - HTTMethod.GET - >>> HTTMethod.GET == 'GET' + >>> + >>> HTTPMethod.GET + <HTTPMethod.GET> + >>> HTTPMethod.GET == 'GET' True - >>> HTTMethod.GET.value + >>> HTTPMethod.GET.value 'GET' - >>> HTTMethod.GET.description - 'Transfer a current representation of the target resource.' + >>> HTTPMethod.GET.description + 'Retrieve the target.' >>> list(HTTPMethod) - [HTTPMethod.GET, HTTPMethod.HEAD, ...] + [<HTTPMethod.CONNECT>, + <HTTPMethod.DELETE>, + <HTTPMethod.GET>, + <HTTPMethod.HEAD>, + <HTTPMethod.OPTIONS>, + <HTTPMethod.PATCH>, + <HTTPMethod.POST>, + <HTTPMethod.PUT>, + <HTTPMethod.TRACE>] .. _http-methods: diff --git a/Doc/library/http.server.rst b/Doc/library/http.server.rst index 81b6bf5373b495..3290b9beab3ed9 100644 --- a/Doc/library/http.server.rst +++ b/Doc/library/http.server.rst @@ -512,3 +512,12 @@ Security Considerations :class:`SimpleHTTPRequestHandler` will follow symbolic links when handling requests, this makes it possible for files outside of the specified directory to be served. + +Earlier versions of Python did not scrub control characters from the +log messages emitted to stderr from ``python -m http.server`` or the +default :class:`BaseHTTPRequestHandler` ``.log_message`` +implementation. This could allow remote clients connecting to your +server to send nefarious control codes to your terminal. + +.. versionadded:: 3.12 + Control characters are scrubbed in stderr logs. 
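As a small sketch of how the :class:`http.HTTPMethod` enum shown above might be used (Python 3.12 and later; the ``allow`` helper is hypothetical):

.. code-block:: python

   from http import HTTPMethod, HTTPStatus

   def allow(method: str) -> HTTPStatus:
       # HTTPMethod members compare equal to plain strings such as "GET".
       if method in (HTTPMethod.GET, HTTPMethod.HEAD):
           return HTTPStatus.OK
       return HTTPStatus.METHOD_NOT_ALLOWED

   assert allow("GET") is HTTPStatus.OK
   assert allow("DELETE") is HTTPStatus.METHOD_NOT_ALLOWED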
diff --git a/Doc/library/importlib.resources.abc.rst b/Doc/library/importlib.resources.abc.rst index 57fffe0d905cbe..7747e89a833c02 100644 --- a/Doc/library/importlib.resources.abc.rst +++ b/Doc/library/importlib.resources.abc.rst @@ -145,7 +145,7 @@ :class:`importlib.resources.abc.ResourceReader` and provides concrete implementations of the :class:`importlib.resources.abc.ResourceReader`'s abstract methods. Therefore, any loader supplying - :class:`importlib.abc.TraversableReader` also supplies ResourceReader. + :class:`importlib.abc.TraversableResources` also supplies ResourceReader. Loaders that wish to support resource reading are expected to implement this interface. diff --git a/Doc/library/importlib.resources.rst b/Doc/library/importlib.resources.rst index 827e7d8d5aced4..399191301a3614 100644 --- a/Doc/library/importlib.resources.rst +++ b/Doc/library/importlib.resources.rst @@ -11,9 +11,17 @@ .. versionadded:: 3.7 This module leverages Python's import system to provide access to *resources* -within *packages*. If you can import a package, you can access resources -within that package. Resources can be opened or read, in either binary or -text mode. +within *packages*. + +"Resources" are file-like resources associated with a module or package in +Python. The resources may be contained directly in a package or within a +subdirectory contained in that package. Resources may be text or binary. As a +result, Python module sources (.py) of a package and compilation artifacts +(pycache) are technically de-facto resources of that package. In practice, +however, resources are primarily those non-Python artifacts exposed +specifically by the package author. + +Resources can be opened or read in either binary or text mode. Resources are roughly akin to files inside directories, though it's important to keep in mind that this is just a metaphor. Resources and packages **do diff --git a/Doc/library/index.rst b/Doc/library/index.rst index 7d2002b37df12c..d064b680f9aaa4 100644 --- a/Doc/library/index.rst +++ b/Doc/library/index.rst @@ -27,8 +27,8 @@ as a collection of packages, so it may be necessary to use the packaging tools provided with the operating system to obtain some or all of the optional components. -In addition to the standard library, there is a growing collection of -several thousand components (from individual programs and modules to +In addition to the standard library, there is an active collection of +hundreds of thousands of components (from individual programs and modules to packages and entire application development frameworks), available from the `Python Package Index <https://pypi.org>`_. diff --git a/Doc/library/inspect.rst b/Doc/library/inspect.rst index 44f1ae04c9e39e..6705577551dcc5 100644 --- a/Doc/library/inspect.rst +++ b/Doc/library/inspect.rst @@ -32,7 +32,7 @@ The :func:`getmembers` function retrieves the members of an object such as a class or module. The functions whose names begin with "is" are mainly provided as convenient choices for the second argument to :func:`getmembers`. They also help you determine when you can expect to find the following special -attributes: +attributes (see :ref:`import-mod-attrs` for module attributes): .. this function name is too big to fit in the ascii-art table below .. 
|coroutine-origin-link| replace:: :func:`sys.set_coroutine_origin_tracking_depth` @@ -40,11 +40,6 @@ attributes: +-----------+-------------------+---------------------------+ | Type | Attribute | Description | +===========+===================+===========================+ -| module | __doc__ | documentation string | -+-----------+-------------------+---------------------------+ -| | __file__ | filename (missing for | -| | | built-in modules) | -+-----------+-------------------+---------------------------+ | class | __doc__ | documentation string | +-----------+-------------------+---------------------------+ | | __name__ | name with which this | @@ -720,6 +715,7 @@ function. >>> def test(a, b): ... pass + ... >>> sig = signature(test) >>> new_sig = sig.replace(return_annotation="new return anno") >>> str(new_sig) @@ -1059,6 +1055,7 @@ Classes and functions >>> from inspect import getcallargs >>> def f(a, b=1, *pos, **named): ... pass + ... >>> getcallargs(f, 1, 2, 3) == {'a': 1, 'named': {}, 'b': 2, 'pos': (3,)} True >>> getcallargs(f, a=2, x=4) == {'a': 2, 'named': {'x': 4}, 'b': 1, 'pos': ()} diff --git a/Doc/library/intro.rst b/Doc/library/intro.rst index 1020924038e91f..5a4c9b8b16ab3b 100644 --- a/Doc/library/intro.rst +++ b/Doc/library/intro.rst @@ -114,7 +114,7 @@ DOM APIs as well as limited networking capabilities with JavaScript's .. _WebAssembly: https://webassembly.org/ .. _Emscripten: https://emscripten.org/ -.. _Emscripten Networking: https://emscripten.org/docs/porting/networking.html> +.. _Emscripten Networking: https://emscripten.org/docs/porting/networking.html .. _WASI: https://wasi.dev/ .. _wasmtime: https://wasmtime.dev/ .. _Pyodide: https://pyodide.org/ diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst index 0b5978505a9672..624d2430ac20d7 100644 --- a/Doc/library/itertools.rst +++ b/Doc/library/itertools.rst @@ -52,7 +52,7 @@ Iterator Arguments Results Iterator Arguments Results Example ============================ ============================ ================================================= ============================================================= :func:`accumulate` p [,func] p0, p0+p1, p0+p1+p2, ... ``accumulate([1,2,3,4,5]) --> 1 3 6 10 15`` -:func:`batched` p, n [p0, p1, ..., p_n-1], ... ``batched('ABCDEFG', n=3) --> ABC DEF G`` +:func:`batched` p, n (p0, p1, ..., p_n-1), ... ``batched('ABCDEFG', n=3) --> ABC DEF G`` :func:`chain` p, q, ... p0, p1, ... plast, q0, q1, ... ``chain('ABC', 'DEF') --> A B C D E F`` :func:`chain.from_iterable` iterable p0, p1, ... plast, q0, q1, ... ``chain.from_iterable(['ABC', 'DEF']) --> A B C D E F`` :func:`compress` data, selectors (d[0] if s[0]), (d[1] if s[1]), ... ``compress('ABCDEF', [1,0,1,0,1,1]) --> A C E F`` @@ -166,11 +166,11 @@ loops that truncate the stream. .. function:: batched(iterable, n) - Batch data from the *iterable* into lists of length *n*. The last + Batch data from the *iterable* into tuples of length *n*. The last batch may be shorter than *n*. - Loops over the input iterable and accumulates data into lists up to - size *n*. The input is consumed lazily, just enough to fill a list. + Loops over the input iterable and accumulates data into tuples up to + size *n*. The input is consumed lazily, just enough to fill a batch. The result is yielded as soon as the batch is full or when the input iterable is exhausted: @@ -179,14 +179,14 @@ loops that truncate the stream. 
>>> flattened_data = ['roses', 'red', 'violets', 'blue', 'sugar', 'sweet'] >>> unflattened = list(batched(flattened_data, 2)) >>> unflattened - [['roses', 'red'], ['violets', 'blue'], ['sugar', 'sweet']] + [('roses', 'red'), ('violets', 'blue'), ('sugar', 'sweet')] >>> for batch in batched('ABCDEFG', 3): ... print(batch) ... - ['A', 'B', 'C'] - ['D', 'E', 'F'] - ['G'] + ('A', 'B', 'C') + ('D', 'E', 'F') + ('G',) Roughly equivalent to:: @@ -195,7 +195,7 @@ loops that truncate the stream. if n < 1: raise ValueError('n must be at least one') it = iter(iterable) - while (batch := list(islice(it, n))): + while (batch := tuple(islice(it, n))): yield batch .. versionadded:: 3.12 diff --git a/Doc/library/locale.rst b/Doc/library/locale.rst index 5bb09b68ca138c..1e46b0f3bf5b03 100644 --- a/Doc/library/locale.rst +++ b/Doc/library/locale.rst @@ -147,12 +147,12 @@ The :mod:`locale` module defines the following exception and functions: | ``CHAR_MAX`` | Nothing is specified in this locale. | +--------------+-----------------------------------------+ - The function sets temporarily the ``LC_CTYPE`` locale to the ``LC_NUMERIC`` + The function temporarily sets the ``LC_CTYPE`` locale to the ``LC_NUMERIC`` locale or the ``LC_MONETARY`` locale if locales are different and numeric or monetary strings are non-ASCII. This temporary change affects other threads. .. versionchanged:: 3.7 - The function now sets temporarily the ``LC_CTYPE`` locale to the + The function now temporarily sets the ``LC_CTYPE`` locale to the ``LC_NUMERIC`` locale in some cases. @@ -227,16 +227,18 @@ The :mod:`locale` module defines the following exception and functions: Get a regular expression that can be used with the regex function to recognize a positive response to a yes/no question. - .. note:: - - The expression is in the syntax suitable for the :c:func:`regex` function - from the C library, which might differ from the syntax used in :mod:`re`. - .. data:: NOEXPR Get a regular expression that can be used with the regex(3) function to recognize a negative response to a yes/no question. + .. note:: + + The regular expressions for :const:`YESEXPR` and + :const:`NOEXPR` use syntax suitable for the + :c:func:`regex` function from the C library, which might + differ from the syntax used in :mod:`re`. + .. data:: CRNCYSTR Get the currency symbol, preceded by "-" if the symbol should appear before @@ -399,7 +401,7 @@ The :mod:`locale` module defines the following exception and functions: Formats a number *val* according to the current :const:`LC_NUMERIC` setting. The format follows the conventions of the ``%`` operator. For floating point - values, the decimal point is modified if appropriate. If *grouping* is true, + values, the decimal point is modified if appropriate. If *grouping* is ``True``, also takes the grouping into account. If *monetary* is true, the conversion uses monetary thousands separator and @@ -417,12 +419,14 @@ The :mod:`locale` module defines the following exception and functions: Formats a number *val* according to the current :const:`LC_MONETARY` settings. The returned string includes the currency symbol if *symbol* is true, which is - the default. If *grouping* is true (which is not the default), grouping is done - with the value. If *international* is true (which is not the default), the + the default. If *grouping* is ``True`` (which is not the default), grouping is done + with the value. If *international* is ``True`` (which is not the default), the international currency symbol is used. 
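A short sketch of the :func:`locale.format_string` and :func:`locale.currency` behaviour described above; it assumes the ``en_US.UTF-8`` locale is installed, and the exact output depends on the system's locale database:

.. code-block:: python

   import locale

   locale.setlocale(locale.LC_ALL, "en_US.UTF-8")   # assumed to be available

   # grouping=True inserts the locale's thousands separators.
   print(locale.format_string("%.2f", 1234567.891, grouping=True))   # 1,234,567.89
   print(locale.currency(1234567.891, grouping=True))                # $1,234,567.89
   print(locale.currency(1234567.891, international=True))           # USD 1234567.89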
- Note that this function will not work with the 'C' locale, so you have to set a - locale via :func:`setlocale` first. + .. note:: + + This function will not work with the 'C' locale, so you have to set a + locale via :func:`setlocale` first. .. function:: str(float) @@ -609,4 +613,3 @@ applications that link with additional C libraries which internally invoke :c:func:`gettext` or :c:func:`dcgettext`. For these applications, it may be necessary to bind the text domain, so that the libraries can properly locate their message catalogs. - diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst index 5516084780673f..b5ceeb796f8f2f 100644 --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -1089,10 +1089,14 @@ Miscellaneous .. versionchanged:: 3.11 Accepts a :term:`path-like object`. -.. function:: set_start_method(method) +.. function:: set_start_method(method, force=False) Set the method which should be used to start child processes. - *method* can be ``'fork'``, ``'spawn'`` or ``'forkserver'``. + The *method* argument can be ``'fork'``, ``'spawn'`` or ``'forkserver'``. + Raises :exc:`RuntimeError` if the start method has already been set and *force* + is not ``True``. If *method* is ``None`` and *force* is ``True`` then the start + method is set to ``None``. If *method* is ``None`` and *force* is ``False`` + then the context is set to the default context. Note that this should be called at most once, and it should be protected inside the ``if __name__ == '__main__'`` clause of the diff --git a/Doc/library/os.path.rst b/Doc/library/os.path.rst index 6d52a03ba95704..50e089653fe71b 100644 --- a/Doc/library/os.path.rst +++ b/Doc/library/os.path.rst @@ -266,6 +266,15 @@ the :mod:`glob` module.) Accepts a :term:`path-like object`. +.. function:: isjunction(path) + + Return ``True`` if *path* refers to an :func:`existing <lexists>` directory + entry that is a junction. Always return ``False`` if junctions are not + supported on the current platform. + + .. versionadded:: 3.12 + + .. function:: islink(path) Return ``True`` if *path* refers to an :func:`existing <exists>` directory diff --git a/Doc/library/os.rst b/Doc/library/os.rst index 0f0fb55e315c95..b06f9bbcd831c2 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -2421,7 +2421,7 @@ features: .. function:: remove(path, *, dir_fd=None) Remove (delete) the file *path*. If *path* is a directory, an - :exc:`IsADirectoryError` is raised. Use :func:`rmdir` to remove directories. + :exc:`OSError` is raised. Use :func:`rmdir` to remove directories. If the file does not exist, a :exc:`FileNotFoundError` is raised. This function can support :ref:`paths relative to directory descriptors @@ -2738,6 +2738,17 @@ features: This method can raise :exc:`OSError`, such as :exc:`PermissionError`, but :exc:`FileNotFoundError` is caught and not raised. + .. method:: is_junction() + + Return ``True`` if this entry is a junction (even if broken); + return ``False`` if the entry points to a regular directory, any kind + of file, a symlink, or if it doesn't exist anymore. + + The result is cached on the ``os.DirEntry`` object. Call + :func:`os.path.isjunction` to fetch up-to-date information. + + .. versionadded:: 3.12 + .. method:: stat(*, follow_symlinks=True) Return a :class:`stat_result` object for this entry. This method @@ -2760,8 +2771,8 @@ features: Note that there is a nice correspondence between several attributes and methods of ``os.DirEntry`` and of :class:`pathlib.Path`. 
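To illustrate the new :meth:`os.DirEntry.is_junction` method described above, a minimal sketch (Python 3.12 and later; junctions only exist on Windows, so elsewhere this simply prints nothing):

.. code-block:: python

   import os

   with os.scandir(".") as entries:
       for entry in entries:
           # The result is cached on the DirEntry object, so repeated
           # calls do not hit the filesystem again.
           if entry.is_junction():
               print("junction:", entry.name)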
In particular, the ``name`` attribute has the same - meaning, as do the ``is_dir()``, ``is_file()``, ``is_symlink()`` - and ``stat()`` methods. + meaning, as do the ``is_dir()``, ``is_file()``, ``is_symlink()``, + ``is_junction()``, and ``stat()`` methods. .. versionadded:: 3.5 @@ -4480,6 +4491,9 @@ written in Python, such as a mail server's external command delivery program. number is zero); the high bit of the low byte is set if a core file was produced. + If there are no children that could be waited for, :exc:`ChildProcessError` + is raised. + :func:`waitstatus_to_exitcode` can be used to convert the exit status into an exit code. @@ -4487,76 +4501,40 @@ written in Python, such as a mail server's external command delivery program. .. seealso:: - :func:`waitpid` can be used to wait for the completion of a specific - child process and has more options. - -.. function:: waitid(idtype, id, options, /) - - Wait for the completion of one or more child processes. - *idtype* can be :data:`P_PID`, :data:`P_PGID`, :data:`P_ALL`, or - :data:`P_PIDFD` on Linux. - *id* specifies the pid to wait on. - *options* is constructed from the ORing of one or more of :data:`WEXITED`, - :data:`WSTOPPED` or :data:`WCONTINUED` and additionally may be ORed with - :data:`WNOHANG` or :data:`WNOWAIT`. The return value is an object - representing the data contained in the :c:type:`siginfo_t` structure, namely: - :attr:`si_pid`, :attr:`si_uid`, :attr:`si_signo`, :attr:`si_status`, - :attr:`si_code` or ``None`` if :data:`WNOHANG` is specified and there are no - children in a waitable state. - - .. availability:: Unix, not Emscripten, not WASI. - - .. versionadded:: 3.3 - -.. data:: P_PID - P_PGID - P_ALL - - These are the possible values for *idtype* in :func:`waitid`. They affect - how *id* is interpreted. - - .. availability:: Unix, not Emscripten, not WASI. - - .. versionadded:: 3.3 - -.. data:: P_PIDFD - - This is a Linux-specific *idtype* that indicates that *id* is a file - descriptor that refers to a process. + The other :func:`!wait*` functions documented below can be used to wait for the + completion of a specific child process and have more options. + :func:`waitpid` is the only one also available on Windows. - .. availability:: Linux >= 5.4 - .. versionadded:: 3.9 - -.. data:: WEXITED - WSTOPPED - WNOWAIT +.. function:: waitid(idtype, id, options, /) - Flags that can be used in *options* in :func:`waitid` that specify what - child signal to wait for. + Wait for the completion of a child process. - .. availability:: Unix, not Emscripten, not WASI. + *idtype* can be :data:`P_PID`, :data:`P_PGID`, :data:`P_ALL`, or (on Linux) :data:`P_PIDFD`. + The interpretation of *id* depends on it; see their individual descriptions. - .. versionadded:: 3.3 + *options* is an OR combination of flags. At least one of :data:`WEXITED`, + :data:`WSTOPPED` or :data:`WCONTINUED` is required; + :data:`WNOHANG` and :data:`WNOWAIT` are additional optional flags. + The return value is an object representing the data contained in the + :c:type:`!siginfo_t` structure with the following attributes: -.. 
data:: CLD_EXITED - CLD_KILLED - CLD_DUMPED - CLD_TRAPPED - CLD_STOPPED - CLD_CONTINUED + * :attr:`!si_pid` (process ID) + * :attr:`!si_uid` (real user ID of the child) + * :attr:`!si_signo` (always :data:`~signal.SIGCHLD`) + * :attr:`!si_status` (the exit status or signal number, depending on :attr:`!si_code`) + * :attr:`!si_code` (see :data:`CLD_EXITED` for possible values) - These are the possible values for :attr:`si_code` in the result returned by - :func:`waitid`. + If :data:`WNOHANG` is specified and there are no matching children in the + requested state, ``None`` is returned. + Otherwise, if there are no matching children + that could be waited for, :exc:`ChildProcessError` is raised. .. availability:: Unix, not Emscripten, not WASI. .. versionadded:: 3.3 - .. versionchanged:: 3.9 - Added :data:`CLD_KILLED` and :data:`CLD_STOPPED` values. - .. function:: waitpid(pid, options, /) @@ -4574,8 +4552,11 @@ written in Python, such as a mail server's external command delivery program. ``-1``, status is requested for any process in the process group ``-pid`` (the absolute value of *pid*). - An :exc:`OSError` is raised with the value of errno when the syscall - returns -1. + *options* is an OR combination of flags. If it contains :data:`WNOHANG` and + there are no matching children in the requested state, ``(0, 0)`` is + returned. Otherwise, if there are no matching children that could be waited + for, :exc:`ChildProcessError` is raised. Other options that can be used are + :data:`WUNTRACED` and :data:`WCONTINUED`. On Windows: Wait for completion of a process given by process handle *pid*, and return a tuple containing *pid*, and its exit status shifted left by 8 bits @@ -4588,7 +4569,7 @@ written in Python, such as a mail server's external command delivery program. :func:`waitstatus_to_exitcode` can be used to convert the exit status into an exit code. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, Windows, not Emscripten, not WASI. .. versionchanged:: 3.5 If the system call is interrupted and the signal handler does not raise an @@ -4601,9 +4582,9 @@ written in Python, such as a mail server's external command delivery program. Similar to :func:`waitpid`, except no process id argument is given and a 3-element tuple containing the child's process id, exit status indication, and resource usage information is returned. Refer to - :mod:`resource`.\ :func:`~resource.getrusage` for details on resource usage - information. The option argument is the same as that provided to - :func:`waitpid` and :func:`wait4`. + :func:`resource.getrusage` for details on resource usage information. The + *options* argument is the same as that provided to :func:`waitpid` and + :func:`wait4`. :func:`waitstatus_to_exitcode` can be used to convert the exit status into an exitcode. @@ -4614,10 +4595,10 @@ written in Python, such as a mail server's external command delivery program. .. function:: wait4(pid, options) Similar to :func:`waitpid`, except a 3-element tuple, containing the child's - process id, exit status indication, and resource usage information is returned. - Refer to :mod:`resource`.\ :func:`~resource.getrusage` for details on - resource usage information. The arguments to :func:`wait4` are the same - as those provided to :func:`waitpid`. + process id, exit status indication, and resource usage information is + returned. Refer to :func:`resource.getrusage` for details on resource usage + information. 
The arguments to :func:`wait4` are the same as those provided + to :func:`waitpid`. :func:`waitstatus_to_exitcode` can be used to convert the exit status into an exitcode. @@ -4625,6 +4606,111 @@ written in Python, such as a mail server's external command delivery program. .. availability:: Unix, not Emscripten, not WASI. +.. data:: P_PID + P_PGID + P_ALL + P_PIDFD + + These are the possible values for *idtype* in :func:`waitid`. They affect + how *id* is interpreted: + + * :data:`!P_PID` - wait for the child whose PID is *id*. + * :data:`!P_PGID` - wait for any child whose progress group ID is *id*. + * :data:`!P_ALL` - wait for any child; *id* is ignored. + * :data:`!P_PIDFD` - wait for the child identified by the file descriptor + *id* (a process file descriptor created with :func:`pidfd_open`). + + .. availability:: Unix, not Emscripten, not WASI. + + .. note:: :data:`!P_PIDFD` is only available on Linux >= 5.4. + + .. versionadded:: 3.3 + .. versionadded:: 3.9 + The :data:`!P_PIDFD` constant. + + +.. data:: WCONTINUED + + This *options* flag for :func:`waitpid`, :func:`wait3`, :func:`wait4`, and + :func:`waitid` causes child processes to be reported if they have been + continued from a job control stop since they were last reported. + + .. availability:: Unix, not Emscripten, not WASI. + + +.. data:: WEXITED + + This *options* flag for :func:`waitid` causes child processes that have terminated to + be reported. + + The other ``wait*`` functions always report children that have terminated, + so this option is not available for them. + + .. availability:: Unix, not Emscripten, not WASI. + + .. versionadded:: 3.3 + + +.. data:: WSTOPPED + + This *options* flag for :func:`waitid` causes child processes that have been stopped + by the delivery of a signal to be reported. + + This option is not available for the other ``wait*`` functions. + + .. availability:: Unix, not Emscripten, not WASI. + + .. versionadded:: 3.3 + + +.. data:: WUNTRACED + + This *options* flag for :func:`waitpid`, :func:`wait3`, and :func:`wait4` causes + child processes to also be reported if they have been stopped but their + current state has not been reported since they were stopped. + + This option is not available for :func:`waitid`. + + .. availability:: Unix, not Emscripten, not WASI. + + +.. data:: WNOHANG + + This *options* flag causes :func:`waitpid`, :func:`wait3`, :func:`wait4`, and + :func:`waitid` to return right away if no child process status is available + immediately. + + .. availability:: Unix, not Emscripten, not WASI. + + +.. data:: WNOWAIT + + This *options* flag causes :func:`waitid` to leave the child in a waitable state, so that + a later :func:`!wait*` call can be used to retrieve the child status information again. + + This option is not available for the other ``wait*`` functions. + + .. availability:: Unix, not Emscripten, not WASI. + + +.. data:: CLD_EXITED + CLD_KILLED + CLD_DUMPED + CLD_TRAPPED + CLD_STOPPED + CLD_CONTINUED + + These are the possible values for :attr:`!si_code` in the result returned by + :func:`waitid`. + + .. availability:: Unix, not Emscripten, not WASI. + + .. versionadded:: 3.3 + + .. versionchanged:: 3.9 + Added :data:`CLD_KILLED` and :data:`CLD_STOPPED` values. + + .. function:: waitstatus_to_exitcode(status) Convert a wait status to an exit code. @@ -4657,32 +4743,6 @@ written in Python, such as a mail server's external command delivery program. .. versionadded:: 3.9 -.. 
data:: WNOHANG - - The option for :func:`waitpid` to return immediately if no child process status - is available immediately. The function returns ``(0, 0)`` in this case. - - .. availability:: Unix, not Emscripten, not WASI. - - -.. data:: WCONTINUED - - This option causes child processes to be reported if they have been continued - from a job control stop since their status was last reported. - - .. availability:: Unix, not Emscripten, not WASI. - - Some Unix systems. - - -.. data:: WUNTRACED - - This option causes child processes to be reported if they have been stopped but - their current state has not been reported since they were stopped. - - .. availability:: Unix, not Emscripten, not WASI. - - The following functions take a process status code as returned by :func:`system`, :func:`wait`, or :func:`waitpid` as a parameter. They may be used to determine the disposition of a process. diff --git a/Doc/library/pathlib.rst b/Doc/library/pathlib.rst index 944963e1e1ae79..6537637f33c70e 100644 --- a/Doc/library/pathlib.rst +++ b/Doc/library/pathlib.rst @@ -891,6 +891,14 @@ call fails (for example because the path doesn't exist). other errors (such as permission errors) are propagated. +.. method:: Path.is_junction() + + Return ``True`` if the path points to a junction, and ``False`` for any other + type of file. Currently only Windows supports junctions. + + .. versionadded:: 3.12 + + .. method:: Path.is_mount() Return ``True`` if the path is a :dfn:`mount point`: a point in a diff --git a/Doc/library/profile.rst b/Doc/library/profile.rst index 2d95096f4cb83a..c2189e02656c7a 100644 --- a/Doc/library/profile.rst +++ b/Doc/library/profile.rst @@ -274,7 +274,7 @@ functions: with cProfile.Profile() as pr: # ... do something ... - pr.print_stats() + pr.print_stats() .. versionchanged:: 3.8 Added context manager support. diff --git a/Doc/library/re.rst b/Doc/library/re.rst index 5b304f717b07fa..f7d46586cf7570 100644 --- a/Doc/library/re.rst +++ b/Doc/library/re.rst @@ -29,7 +29,7 @@ a literal backslash, one might have to write ``'\\\\'`` as the pattern string, because the regular expression must be ``\\``, and each backslash must be expressed as ``\\`` inside a regular Python string literal. Also, please note that any invalid escape sequences in Python's -usage of the backslash in string literals now generate a :exc:`DeprecationWarning` +usage of the backslash in string literals now generate a :exc:`SyntaxWarning` and in the future this will become a :exc:`SyntaxError`. This behaviour will happen even if it is a valid escape sequence for a regular expression. @@ -483,6 +483,9 @@ The special characters are: some fixed length. Patterns which start with negative lookbehind assertions may match at the beginning of the string being searched. +.. _re-conditional-expression: +.. index:: single: (?(; in regular expressions + ``(?(id/name)yes-pattern|no-pattern)`` Will try to match with ``yes-pattern`` if the group with given *id* or *name* exists, and with ``no-pattern`` if it doesn't. ``no-pattern`` is @@ -970,6 +973,7 @@ Functions >>> def dashrepl(matchobj): ... if matchobj.group(0) == '-': return ' ' ... else: return '-' + ... >>> re.sub('-{1,2}', dashrepl, 'pro----gram-files') 'pro--gram files' >>> re.sub(r'\sAND\s', ' & ', 'Baked Beans And Spam', flags=re.IGNORECASE) @@ -1562,16 +1566,22 @@ search() vs. match() .. sectionauthor:: Fred L. Drake, Jr. 
<fdrake@acm.org> -Python offers two different primitive operations based on regular expressions: -:func:`re.match` checks for a match only at the beginning of the string, while -:func:`re.search` checks for a match anywhere in the string (this is what Perl -does by default). +Python offers different primitive operations based on regular expressions: + ++ :func:`re.match` checks for a match only at the beginning of the string ++ :func:`re.search` checks for a match anywhere in the string + (this is what Perl does by default) ++ :func:`re.fullmatch` checks for entire string to be a match + For example:: >>> re.match("c", "abcdef") # No match >>> re.search("c", "abcdef") # Match <re.Match object; span=(2, 3), match='c'> + >>> re.fullmatch("p.*n", "python") # Match + <re.Match object; span=(0, 6), match='python'> + >>> re.fullmatch("r.*n", "python") # No match Regular expressions beginning with ``'^'`` can be used with :func:`search` to restrict the match at the beginning of the string:: @@ -1585,8 +1595,8 @@ Note however that in :const:`MULTILINE` mode :func:`match` only matches at the beginning of the string, whereas using :func:`search` with a regular expression beginning with ``'^'`` will match at the beginning of each line. :: - >>> re.match('X', 'A\nB\nX', re.MULTILINE) # No match - >>> re.search('^X', 'A\nB\nX', re.MULTILINE) # Match + >>> re.match("X", "A\nB\nX", re.MULTILINE) # No match + >>> re.search("^X", "A\nB\nX", re.MULTILINE) # Match <re.Match object; span=(4, 5), match='X'> @@ -1663,6 +1673,7 @@ in each word of a sentence except for the first and last characters:: ... inner_word = list(m.group(2)) ... random.shuffle(inner_word) ... return m.group(1) + "".join(inner_word) + m.group(3) + ... >>> text = "Professor Abdolmalek, please report your absences promptly." >>> re.sub(r"(\w)(\w+)(\w)", repl, text) 'Poefsrosr Aealmlobdk, pslaee reorpt your abnseces plmrptoy.' diff --git a/Doc/library/secrets.rst b/Doc/library/secrets.rst index dc8e5f46fb581e..4405dfc0535973 100644 --- a/Doc/library/secrets.rst +++ b/Doc/library/secrets.rst @@ -128,7 +128,9 @@ Other functions .. function:: compare_digest(a, b) - Return ``True`` if strings *a* and *b* are equal, otherwise ``False``, + Return ``True`` if strings or + :term:`bytes-like objects <bytes-like object>` + *a* and *b* are equal, otherwise ``False``, using a "constant-time compare" to reduce the risk of `timing attacks <https://codahale.com/a-lesson-in-timing-attacks/>`_. See :func:`hmac.compare_digest` for additional details. diff --git a/Doc/library/signal.rst b/Doc/library/signal.rst index 2269f50cbaff99..523d1ac5001360 100644 --- a/Doc/library/signal.rst +++ b/Doc/library/signal.rst @@ -4,6 +4,8 @@ .. module:: signal :synopsis: Set handlers for asynchronous events. +**Source code:** :source:`Lib/signal.py` + -------------- This module provides mechanisms to use signal handlers in Python. @@ -362,9 +364,9 @@ The :mod:`signal` module defines the following functions: .. function:: strsignal(signalnum) - Return the system description of the signal *signalnum*, such as - "Interrupt", "Segmentation fault", etc. Returns :const:`None` if the signal - is not recognized. + Returns the description of signal *signalnum*, such as "Interrupt" + for :const:`SIGINT`. Returns :const:`None` if *signalnum* has no + description. Raises :exc:`ValueError` if *signalnum* is invalid. .. 
versionadded:: 3.8 diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst index 3f6cb480371617..de2e1aa3868bb3 100644 --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -189,8 +189,11 @@ created. Socket addresses are represented as follows: ``(ifname, proto[, pkttype[, hatype[, addr]]])`` where: - *ifname* - String specifying the device name. - - *proto* - An in network-byte-order integer specifying the Ethernet - protocol number. + - *proto* - The Ethernet protocol number. + May be :data:`ETH_P_ALL` to capture all protocols, + one of the :ref:`ETHERTYPE_* constants <socket-ethernet-types>` + or any other Ethernet protocol number. + Value must be in network-byte-order. - *pkttype* - Optional integer specifying the packet type: - ``PACKET_HOST`` (the default) - Packet addressed to the local host. @@ -425,7 +428,14 @@ Constants .. versionchanged:: 3.12 Added ``SO_RTABLE`` and ``SO_USER_COOKIE``. On OpenBSD and FreeBSD respectively those constants can be used in the same way that - ``SO_MARK`` is used on Linux. + ``SO_MARK`` is used on Linux. Also added missing TCP socket options from + Linux: ``TCP_MD5SIG``, ``TCP_THIN_LINEAR_TIMEOUTS``, ``TCP_THIN_DUPACK``, + ``TCP_REPAIR``, ``TCP_REPAIR_QUEUE``, ``TCP_QUEUE_SEQ``, + ``TCP_REPAIR_OPTIONS``, ``TCP_TIMESTAMP``, ``TCP_CC_INFO``, + ``TCP_SAVE_SYN``, ``TCP_SAVED_SYN``, ``TCP_REPAIR_WINDOW``, + ``TCP_FASTOPEN_CONNECT``, ``TCP_ULP``, ``TCP_MD5SIG_EXT``, + ``TCP_FASTOPEN_KEY``, ``TCP_FASTOPEN_NO_COOKIE``, + ``TCP_ZEROCOPY_RECEIVE``, ``TCP_INQ``, ``TCP_TX_DELAY``. .. data:: AF_CAN PF_CAN @@ -508,6 +518,19 @@ Constants .. availability:: Linux >= 2.2. +.. data:: ETH_P_ALL + + :data:`!ETH_P_ALL` can be used in the :class:`~socket.socket` + constructor as *proto* for the :const:`AF_PACKET` family in order to + capture every packet, regardless of protocol. + + For more information, see the :manpage:`packet(7)` manpage. + + .. availability:: Linux. + + .. versionadded:: 3.12 + + .. data:: AF_RDS PF_RDS SOL_RDS @@ -638,6 +661,22 @@ Constants .. versionadded:: 3.12 +.. _socket-ethernet-types: + +.. data:: ETHERTYPE_ARP + ETHERTYPE_IP + ETHERTYPE_IPV6 + ETHERTYPE_VLAN + + `IEEE 802.3 protocol number + <https://www.iana.org/assignments/ieee-802-numbers/ieee-802-numbers.txt>`_. + constants. + + .. availability:: Linux, FreeBSD, macOS. + + .. versionadded:: 3.12 + + Functions ^^^^^^^^^ diff --git a/Doc/library/socketserver.rst b/Doc/library/socketserver.rst index 70d56a1dad94f2..ceb962e860042d 100644 --- a/Doc/library/socketserver.rst +++ b/Doc/library/socketserver.rst @@ -96,8 +96,7 @@ synchronous servers of four types:: Note that :class:`UnixDatagramServer` derives from :class:`UDPServer`, not from :class:`UnixStreamServer` --- the only difference between an IP and a Unix -stream server is the address family, which is simply repeated in both Unix -server classes. +server is the address family. .. class:: ForkingMixIn @@ -177,8 +176,7 @@ expensive or inappropriate for the service) is to maintain an explicit table of partially finished requests and to use :mod:`selectors` to decide which request to work on next (or whether to handle a new incoming request). This is particularly important for stream services where each client can potentially be -connected for a long time (if threads or subprocesses cannot be used). See -:mod:`asyncore` for another way to manage this. +connected for a long time (if threads or subprocesses cannot be used). .. XXX should data and methods be intermingled, or separate? 
how should the distinction between class and instance variables be drawn? @@ -432,11 +430,8 @@ Request Handler Objects The :attr:`self.rfile` and :attr:`self.wfile` attributes can be read or written, respectively, to get the request data or return data to the client. - - The :attr:`rfile` attributes of both classes support the - :class:`io.BufferedIOBase` readable interface, and - :attr:`DatagramRequestHandler.wfile` supports the - :class:`io.BufferedIOBase` writable interface. + The :attr:`!rfile` attributes support the :class:`io.BufferedIOBase` readable interface, + and :attr:`!wfile` attributes support the :class:`!io.BufferedIOBase` writable interface. .. versionchanged:: 3.6 :attr:`StreamRequestHandler.wfile` also supports the diff --git a/Doc/library/sqlite3.rst b/Doc/library/sqlite3.rst index a9bab9add763cc..3622864a4b06f9 100644 --- a/Doc/library/sqlite3.rst +++ b/Doc/library/sqlite3.rst @@ -239,6 +239,7 @@ inserted data and retrieved values from it in multiple ways. * :ref:`sqlite3-adapters` * :ref:`sqlite3-converters` * :ref:`sqlite3-connection-context-manager` + * :ref:`sqlite3-howto-row-factory` * :ref:`sqlite3-explanation` for in-depth background on transaction control. @@ -258,7 +259,8 @@ Module functions .. function:: connect(database, timeout=5.0, detect_types=0, \ isolation_level="DEFERRED", check_same_thread=True, \ factory=sqlite3.Connection, cached_statements=128, \ - uri=False) + uri=False, \*, \ + autocommit=sqlite3.LEGACY_TRANSACTION_CONTROL) Open a connection to an SQLite database. @@ -290,11 +292,13 @@ Module functions By default (``0``), type detection is disabled. :param isolation_level: - The :attr:`~Connection.isolation_level` of the connection, - controlling whether and how transactions are implicitly opened. + Control legacy transaction handling behaviour. + See :attr:`Connection.isolation_level` and + :ref:`sqlite3-transaction-control-isolation-level` for more information. Can be ``"DEFERRED"`` (default), ``"EXCLUSIVE"`` or ``"IMMEDIATE"``; or ``None`` to disable opening transactions implicitly. - See :ref:`sqlite3-controlling-transactions` for more. + Has no effect unless :attr:`Connection.autocommit` is set to + :data:`~sqlite3.LEGACY_TRANSACTION_CONTROL` (the default). :type isolation_level: str | None :param bool check_same_thread: @@ -321,6 +325,15 @@ Module functions The query string allows passing parameters to SQLite, enabling various :ref:`sqlite3-uri-tricks`. + :param autocommit: + Control :pep:`249` transaction handling behaviour. + See :attr:`Connection.autocommit` and + :ref:`sqlite3-transaction-control-autocommit` for more information. + *autocommit* currently defaults to + :data:`~sqlite3.LEGACY_TRANSACTION_CONTROL`. + The default will change to ``False`` in a future Python release. + :type autocommit: bool + :rtype: Connection .. audit-event:: sqlite3.connect database sqlite3.connect @@ -335,6 +348,9 @@ Module functions .. versionadded:: 3.10 The ``sqlite3.connect/handle`` auditing event. + .. versionadded:: 3.12 + The *autocommit* parameter. + .. function:: complete_statement(statement) Return ``True`` if the string *statement* appears to contain @@ -381,6 +397,7 @@ Module functions >>> con = sqlite3.connect(":memory:") >>> def evil_trace(stmt): ... 5/0 + ... >>> con.set_trace_callback(evil_trace) >>> def debug(unraisable): ... print(f"{unraisable.exc_value!r} in callback {unraisable.object.__name__}") @@ -418,6 +435,12 @@ Module functions Module constants ^^^^^^^^^^^^^^^^ +.. 
data:: LEGACY_TRANSACTION_CONTROL + + Set :attr:`~Connection.autocommit` to this constant to select + old style (pre-Python 3.12) transaction control behaviour. + See :ref:`sqlite3-transaction-control-isolation-level` for more information. + .. data:: PARSE_COLNAMES Pass this flag value to the *detect_types* parameter of @@ -478,9 +501,10 @@ Module constants .. note:: - The :mod:`!sqlite3` module supports both ``qmark`` and ``numeric`` DB-API - parameter styles, because that is what the underlying SQLite library - supports. However, the DB-API does not allow multiple values for + The :mod:`!sqlite3` module supports ``qmark``, ``numeric``, + and ``named`` DB-API parameter styles, + because that is what the underlying SQLite library supports. + However, the DB-API does not allow multiple values for the ``paramstyle`` attribute. .. data:: sqlite_version @@ -615,18 +639,27 @@ Connection objects .. method:: commit() Commit any pending transaction to the database. - If there is no open transaction, this method is a no-op. + If :attr:`autocommit` is ``True``, or there is no open transaction, + this method does nothing. + If :attr:`!autocommit` is ``False``, a new transaction is implicitly + opened if a pending transaction was committed by this method. .. method:: rollback() Roll back to the start of any pending transaction. - If there is no open transaction, this method is a no-op. + If :attr:`autocommit` is ``True``, or there is no open transaction, + this method does nothing. + If :attr:`!autocommit` is ``False``, a new transaction is implicitly + opened if a pending transaction was rolled back by this method. .. method:: close() Close the database connection. - Any pending transaction is not committed implicitly; - make sure to :meth:`commit` before closing + If :attr:`autocommit` is ``False``, + any pending transaction is implicitly rolled back. + If :attr:`!autocommit` is ``True`` or :data:`LEGACY_TRANSACTION_CONTROL`, + no implicit transaction control is executed. + Make sure to :meth:`commit` before closing to avoid losing pending changes. .. method:: execute(sql, parameters=(), /) @@ -1223,6 +1256,38 @@ Connection objects .. versionadded:: 3.11 + .. attribute:: autocommit + + This attribute controls :pep:`249`-compliant transaction behaviour. + :attr:`!autocommit` has three allowed values: + + * ``False``: Select :pep:`249`-compliant transaction behaviour, + implying that :mod:`!sqlite3` ensures a transaction is always open. + Use :meth:`commit` and :meth:`rollback` to close transactions. + + This is the recommended value of :attr:`!autocommit`. + + * ``True``: Use SQLite's `autocommit mode`_. + :meth:`commit` and :meth:`rollback` have no effect in this mode. + + * :data:`LEGACY_TRANSACTION_CONTROL`: + Pre-Python 3.12 (non-:pep:`249`-compliant) transaction control. + See :attr:`isolation_level` for more details. + + This is currently the default value of :attr:`!autocommit`. + + Changing :attr:`!autocommit` to ``False`` will open a new transaction, + and changing it to ``True`` will commit any pending transaction. + + See :ref:`sqlite3-transaction-control-autocommit` for more details. + + .. note:: + + The :attr:`isolation_level` attribute has no effect unless + :attr:`autocommit` is :data:`LEGACY_TRANSACTION_CONTROL`. + + .. versionadded:: 3.12 + .. attribute:: in_transaction This read-only attribute corresponds to the low-level SQLite @@ -1235,44 +1300,34 @@ Connection objects .. 
attribute:: isolation_level - This attribute controls the :ref:`transaction handling - <sqlite3-controlling-transactions>` performed by :mod:`!sqlite3`. + Controls the :ref:`legacy transaction handling mode + <sqlite3-transaction-control-isolation-level>` of :mod:`!sqlite3`. If set to ``None``, transactions are never implicitly opened. If set to one of ``"DEFERRED"``, ``"IMMEDIATE"``, or ``"EXCLUSIVE"``, corresponding to the underlying `SQLite transaction behaviour`_, - implicit :ref:`transaction management - <sqlite3-controlling-transactions>` is performed. + :ref:`implicit transaction management + <sqlite3-transaction-control-isolation-level>` is performed. If not overridden by the *isolation_level* parameter of :func:`connect`, the default is ``""``, which is an alias for ``"DEFERRED"``. - .. attribute:: row_factory - - A callable that accepts two arguments, - a :class:`Cursor` object and the raw row results as a :class:`tuple`, - and returns a custom object representing an SQLite row. - - Example: + .. note:: - .. doctest:: + Using :attr:`autocommit` to control transaction handling is + recommended over using :attr:`!isolation_level`. + :attr:`!isolation_level` has no effect unless :attr:`autocommit` is + set to :data:`LEGACY_TRANSACTION_CONTROL` (the default). - >>> def dict_factory(cursor, row): - ... col_names = [col[0] for col in cursor.description] - ... return {key: value for key, value in zip(col_names, row)} - >>> con = sqlite3.connect(":memory:") - >>> con.row_factory = dict_factory - >>> for row in con.execute("SELECT 1 AS a, 2 AS b"): - ... print(row) - {'a': 1, 'b': 2} + .. attribute:: row_factory - If returning a tuple doesn't suffice and you want name-based access to - columns, you should consider setting :attr:`row_factory` to the - highly optimized :class:`sqlite3.Row` type. :class:`Row` provides both - index-based and case-insensitive name-based access to columns with almost no - memory overhead. It will probably be better than your own custom - dictionary-based approach or even a db_row based solution. + The initial :attr:`~Cursor.row_factory` + for :class:`Cursor` objects created from this connection. + Assigning to this attribute does not affect the :attr:`!row_factory` + of existing cursors belonging to this connection, only new ones. + Is ``None`` by default, + meaning each row is returned as a :class:`tuple`. - .. XXX what's a db_row-based solution? + See :ref:`sqlite3-howto-row-factory` for more details. .. attribute:: text_factory @@ -1374,7 +1429,9 @@ Cursor objects :meth:`executescript` if you want to execute multiple SQL statements with one call. - If :attr:`~Connection.isolation_level` is not ``None``, + If :attr:`~Connection.autocommit` is + :data:`LEGACY_TRANSACTION_CONTROL`, + :attr:`~Connection.isolation_level` is not ``None``, *sql* is an ``INSERT``, ``UPDATE``, ``DELETE``, or ``REPLACE`` statement, and there is no open transaction, a transaction is implicitly opened before executing *sql*. @@ -1402,7 +1459,9 @@ Cursor objects .. method:: executescript(sql_script, /) Execute the SQL statements in *sql_script*. - If there is a pending transaction, + If the :attr:`~Connection.autocommit` is + :data:`LEGACY_TRANSACTION_CONTROL` + and there is a pending transaction, an implicit ``COMMIT`` statement is executed first. No other implicit transaction control is performed; any transaction control must be added to *sql_script*. @@ -1425,7 +1484,7 @@ Cursor objects .. 
method:: fetchone() - If :attr:`~Connection.row_factory` is ``None``, + If :attr:`~Cursor.row_factory` is ``None``, return the next row query result set as a :class:`tuple`. Else, pass it to the row factory and return its result. Return ``None`` if no more data is available. @@ -1519,6 +1578,22 @@ Cursor objects including :abbr:`CTE (Common Table Expression)` queries. It is only updated by the :meth:`execute` and :meth:`executemany` methods. + .. attribute:: row_factory + + Control how a row fetched from this :class:`!Cursor` is represented. + If ``None``, a row is represented as a :class:`tuple`. + Can be set to the included :class:`sqlite3.Row`; + or a :term:`callable` that accepts two arguments, + a :class:`Cursor` object and the :class:`!tuple` of row values, + and returns a custom object representing an SQLite row. + + Defaults to what :attr:`Connection.row_factory` was set to + when the :class:`!Cursor` was created. + Assigning to this attribute does not affect + :attr:`Connection.row_factory` of the parent connection. + + See :ref:`sqlite3-howto-row-factory` for more details. + .. The sqlite3.Row example used to be a how-to. It has now been incorporated into the Row reference. We keep the anchor here in order not to break @@ -1537,7 +1612,10 @@ Row objects It supports iteration, equality testing, :func:`len`, and :term:`mapping` access by column name and index. - Two row objects compare equal if have equal columns and equal members. + Two :class:`!Row` objects compare equal + if they have identical column names and values. + + See :ref:`sqlite3-howto-row-factory` for more details. .. method:: keys @@ -1548,21 +1626,6 @@ Row objects .. versionchanged:: 3.5 Added support of slicing. - Example: - - .. doctest:: - - >>> con = sqlite3.connect(":memory:") - >>> con.row_factory = sqlite3.Row - >>> res = con.execute("SELECT 'Earth' AS name, 6378 AS radius") - >>> row = res.fetchone() - >>> row.keys() - ['name', 'radius'] - >>> row[0], row["name"] # Access by index and name. - ('Earth', 'Earth') - >>> row["RADIUS"] # Column names are case-insensitive. - 6378 - .. _sqlite3-blob-objects: @@ -1833,8 +1896,13 @@ The deprecated default adapters and converters consist of: Command-line interface ^^^^^^^^^^^^^^^^^^^^^^ -The :mod:`!sqlite3` module can be invoked as a script +The :mod:`!sqlite3` module can be invoked as a script, +using the interpreter's :option:`-m` switch, in order to provide a simple SQLite shell. +The argument signature is as follows:: + + python -m sqlite3 [-h] [-v] [filename] [sql] + Type ``.quit`` or CTRL-D to exit the shell. .. program:: python -m sqlite3 [-h] [-v] [filename] [sql] @@ -1862,12 +1930,16 @@ How to use placeholders to bind values in SQL queries SQL operations usually need to use values from Python variables. However, beware of using Python's string operations to assemble queries, as they -are vulnerable to `SQL injection attacks`_ (see the `xkcd webcomic -<https://xkcd.com/327/>`_ for a humorous example of what can go wrong):: +are vulnerable to `SQL injection attacks`_. For example, an attacker can simply +close the single quote and inject ``OR TRUE`` to select all rows:: - # Never do this -- insecure! - symbol = 'RHAT' - cur.execute("SELECT * FROM stocks WHERE symbol = '%s'" % symbol) + >>> # Never do this -- insecure! + >>> symbol = input() + ' OR TRUE; -- + >>> sql = "SELECT * FROM stocks WHERE symbol = '%s'" % symbol + >>> print(sql) + SELECT * FROM stocks WHERE symbol = '' OR TRUE; --' + >>> cur.execute(sql) Instead, use the DB-API's parameter substitution. 
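
For instance, a minimal sketch of the same lookup done safely with a ``qmark`` placeholder, reusing ``cur`` and the ``stocks`` table from the example above; the hostile input is now treated as plain data rather than as SQL::

   >>> symbol = "' OR TRUE; --"
   >>> res = cur.execute("SELECT * FROM stocks WHERE symbol = ?", (symbol,))
   >>> res.fetchall()   # nothing is injected; only literal matches are returned
   []
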
To insert a variable into a query string, use a placeholder in the string, and substitute the actual values @@ -2040,7 +2112,7 @@ The following example illustrates the implicit and explicit approaches: return f"Point({self.x}, {self.y})" def adapt_point(point): - return f"{point.x};{point.y}".encode("utf-8") + return f"{point.x};{point.y}" def convert_point(s): x, y = list(map(float, s.split(b";"))) @@ -2106,20 +2178,39 @@ This section shows recipes for common adapters and converters. def convert_date(val): """Convert ISO 8601 date to datetime.date object.""" - return datetime.date.fromisoformat(val) + return datetime.date.fromisoformat(val.decode()) def convert_datetime(val): """Convert ISO 8601 datetime to datetime.datetime object.""" - return datetime.datetime.fromisoformat(val) + return datetime.datetime.fromisoformat(val.decode()) def convert_timestamp(val): """Convert Unix epoch timestamp to datetime.datetime object.""" - return datetime.datetime.fromtimestamp(val) + return datetime.datetime.fromtimestamp(int(val)) sqlite3.register_converter("date", convert_date) sqlite3.register_converter("datetime", convert_datetime) sqlite3.register_converter("timestamp", convert_timestamp) +.. testcode:: + :hide: + + dt = datetime.datetime(2019, 5, 18, 15, 17, 8, 123456) + + assert adapt_date_iso(dt.date()) == "2019-05-18" + assert convert_date(b"2019-05-18") == dt.date() + + assert adapt_datetime_iso(dt) == "2019-05-18T15:17:08.123456" + assert convert_datetime(b"2019-05-18T15:17:08.123456") == dt + + # Using current time as fromtimestamp() returns local date/time. + # Droping microseconds as adapt_datetime_epoch truncates fractional second part. + now = datetime.datetime.now().replace(microsecond=0) + current_timestamp = int(now.timestamp()) + + assert adapt_datetime_epoch(now) == current_timestamp + assert convert_timestamp(str(current_timestamp).encode()) == now + .. _sqlite3-connection-shortcuts: @@ -2177,9 +2268,12 @@ the transaction is committed. If this commit fails, or if the body of the ``with`` statement raises an uncaught exception, the transaction is rolled back. +If :attr:`~Connection.autocommit` is ``False``, +a new transaction is implicitly opened after committing or rolling back. If there is no open transaction upon leaving the body of the ``with`` statement, -the context manager is a no-op. +or if :attr:`~Connection.autocommit` is ``True``, +the context manager does nothing. .. note:: @@ -2259,18 +2353,167 @@ can be found in the `SQLite URI documentation`_. .. _SQLite URI documentation: https://www.sqlite.org/uri.html +.. _sqlite3-howto-row-factory: + +How to create and use row factories +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +By default, :mod:`!sqlite3` represents each row as a :class:`tuple`. +If a :class:`!tuple` does not suit your needs, +you can use the :class:`sqlite3.Row` class +or a custom :attr:`~Cursor.row_factory`. + +While :attr:`!row_factory` exists as an attribute both on the +:class:`Cursor` and the :class:`Connection`, +it is recommended to set :class:`Connection.row_factory`, +so all cursors created from the connection will use the same row factory. + +:class:`!Row` provides indexed and case-insensitive named access to columns, +with minimal memory overhead and performance impact over a :class:`!tuple`. +To use :class:`!Row` as a row factory, +assign it to the :attr:`!row_factory` attribute: + +.. doctest:: + + >>> con = sqlite3.connect(":memory:") + >>> con.row_factory = sqlite3.Row + +Queries now return :class:`!Row` objects: + +.. 
doctest:: + + >>> res = con.execute("SELECT 'Earth' AS name, 6378 AS radius") + >>> row = res.fetchone() + >>> row.keys() + ['name', 'radius'] + >>> row[0] # Access by index. + 'Earth' + >>> row["name"] # Access by name. + 'Earth' + >>> row["RADIUS"] # Column names are case-insensitive. + 6378 + +You can create a custom :attr:`~Cursor.row_factory` +that returns each row as a :class:`dict`, with column names mapped to values: + +.. testcode:: + + def dict_factory(cursor, row): + fields = [column[0] for column in cursor.description] + return {key: value for key, value in zip(fields, row)} + +Using it, queries now return a :class:`!dict` instead of a :class:`!tuple`: + +.. doctest:: + + >>> con = sqlite3.connect(":memory:") + >>> con.row_factory = dict_factory + >>> for row in con.execute("SELECT 1 AS a, 2 AS b"): + ... print(row) + {'a': 1, 'b': 2} + +The following row factory returns a :term:`named tuple`: + +.. testcode:: + + from collections import namedtuple + + def namedtuple_factory(cursor, row): + fields = [column[0] for column in cursor.description] + cls = namedtuple("Row", fields) + return cls._make(row) + +:func:`!namedtuple_factory` can be used as follows: + +.. doctest:: + + >>> con = sqlite3.connect(":memory:") + >>> con.row_factory = namedtuple_factory + >>> cur = con.execute("SELECT 1 AS a, 2 AS b") + >>> row = cur.fetchone() + >>> row + Row(a=1, b=2) + >>> row[0] # Indexed access. + 1 + >>> row.b # Attribute access. + 2 + +With some adjustments, the above recipe can be adapted to use a +:class:`~dataclasses.dataclass`, or any other custom class, +instead of a :class:`~collections.namedtuple`. + + .. _sqlite3-explanation: Explanation ----------- +.. _sqlite3-transaction-control: .. _sqlite3-controlling-transactions: Transaction control ^^^^^^^^^^^^^^^^^^^ -The :mod:`!sqlite3` module does not adhere to the transaction handling recommended -by :pep:`249`. +:mod:`!sqlite3` offers multiple methods of controlling whether, +when and how database transactions are opened and closed. +:ref:`sqlite3-transaction-control-autocommit` is recommended, +while :ref:`sqlite3-transaction-control-isolation-level` +retains the pre-Python 3.12 behaviour. + +.. _sqlite3-transaction-control-autocommit: + +Transaction control via the ``autocommit`` attribute +"""""""""""""""""""""""""""""""""""""""""""""""""""" + +The recommended way of controlling transaction behaviour is through +the :attr:`Connection.autocommit` attribute, +which should preferrably be set using the *autocommit* parameter +of :func:`connect`. + +It is suggested to set *autocommit* to ``False``, +which implies :pep:`249`-compliant transaction control. +This means: + +* :mod:`!sqlite3` ensures that a transaction is always open, + so :func:`connect`, :meth:`Connection.commit`, and :meth:`Connection.rollback` + will implicitly open a new transaction + (immediately after closing the pending one, for the latter two). + :mod:`!sqlite3` uses ``BEGIN DEFERRED`` statements when opening transactions. +* Transactions should be committed explicitly using :meth:`!commit`. +* Transactions should be rolled back explicitly using :meth:`!rollback`. +* An implicit rollback is performed if the database is + :meth:`~Connection.close`-ed with pending changes. + +Set *autocommit* to ``True`` to enable SQLite's `autocommit mode`_. +In this mode, :meth:`Connection.commit` and :meth:`Connection.rollback` +have no effect. 
+Note that SQLite's autocommit mode is distinct from +the :pep:`249`-compliant :attr:`Connection.autocommit` attribute; +use :attr:`Connection.in_transaction` to query +the low-level SQLite autocommit mode. + +Set *autocommit* to :data:`LEGACY_TRANSACTION_CONTROL` +to leave transaction control behaviour to the +:attr:`Connection.isolation_level` attribute. +See :ref:`sqlite3-transaction-control-isolation-level` for more information. + + +.. _sqlite3-transaction-control-isolation-level: + +Transaction control via the ``isolation_level`` attribute +""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +.. note:: + + The recommended way of controlling transactions is via the + :attr:`~Connection.autocommit` attribute. + See :ref:`sqlite3-transaction-control-autocommit`. + +If :attr:`Connection.autocommit` is set to +:data:`LEGACY_TRANSACTION_CONTROL` (the default), +transaction behaviour is controlled using +the :attr:`Connection.isolation_level` attribute. +Otherwise, :attr:`!isolation_level` has no effect. If the connection attribute :attr:`~Connection.isolation_level` is not ``None``, @@ -2301,6 +2544,10 @@ regardless of the value of :attr:`~Connection.isolation_level`. :mod:`!sqlite3` used to implicitly commit an open transaction before DDL statements. This is no longer the case. +.. versionchanged:: 3.12 + The recommended way of controlling transactions is now via the + :attr:`~Connection.autocommit` attribute. + .. _autocommit mode: https://www.sqlite.org/lang_transaction.html#implicit_versus_explicit_transactions diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst index f5d5a7c4d28205..08824feeb3958f 100644 --- a/Doc/library/ssl.rst +++ b/Doc/library/ssl.rst @@ -74,13 +74,10 @@ Functions, Constants, and Exceptions Socket creation ^^^^^^^^^^^^^^^ -Since Python 3.2 and 2.7.9, it is recommended to use the -:meth:`SSLContext.wrap_socket` of an :class:`SSLContext` instance to wrap -sockets as :class:`SSLSocket` objects. The helper functions +Instances of :class:`SSLSocket` must be created using the +:meth:`SSLContext.wrap_socket` method. The helper function :func:`create_default_context` returns a new context with secure default -settings. The old :func:`wrap_socket` function is deprecated since it is -both inefficient and has no support for server name indication (SNI) and -hostname matching. +settings. Client socket example with default context and IPv4/IPv6 dual stack:: @@ -369,10 +366,10 @@ Certificate handling Given the address ``addr`` of an SSL-protected server, as a (*hostname*, *port-number*) pair, fetches the server's certificate, and returns it as a PEM-encoded string. If ``ssl_version`` is specified, uses that version of - the SSL protocol to attempt to connect to the server. If ``ca_certs`` is + the SSL protocol to attempt to connect to the server. If *ca_certs* is specified, it should be a file containing a list of root certificates, the - same format as used for the same parameter in - :meth:`SSLContext.wrap_socket`. The call will attempt to validate the + same format as used for the *cafile* parameter in + :meth:`SSLContext.load_verify_locations`. The call will attempt to validate the server certificate against that set of root certificates, and will fail if the validation attempt fails. A timeout can be specified with the ``timeout`` parameter. @@ -451,33 +448,6 @@ Certificate handling .. versionadded:: 3.4 -.. 
function:: wrap_socket(sock, keyfile=None, certfile=None, \ - server_side=False, cert_reqs=CERT_NONE, ssl_version=PROTOCOL_TLS, \ - ca_certs=None, do_handshake_on_connect=True, \ - suppress_ragged_eofs=True, ciphers=None) - - Takes an instance ``sock`` of :class:`socket.socket`, and returns an instance - of :class:`ssl.SSLSocket`, a subtype of :class:`socket.socket`, which wraps - the underlying socket in an SSL context. ``sock`` must be a - :data:`~socket.SOCK_STREAM` socket; other socket types are unsupported. - - Internally, function creates a :class:`SSLContext` with protocol - *ssl_version* and :attr:`SSLContext.options` set to *cert_reqs*. If - parameters *keyfile*, *certfile*, *ca_certs* or *ciphers* are set, then - the values are passed to :meth:`SSLContext.load_cert_chain`, - :meth:`SSLContext.load_verify_locations`, and - :meth:`SSLContext.set_ciphers`. - - The arguments *server_side*, *do_handshake_on_connect*, and - *suppress_ragged_eofs* have the same meaning as - :meth:`SSLContext.wrap_socket`. - - .. deprecated:: 3.7 - - Since Python 3.2 and 2.7.9, it is recommended to use the - :meth:`SSLContext.wrap_socket` instead of :func:`wrap_socket`. The - top-level function is limited and creates an insecure client socket - without server name indication or hostname matching. Constants ^^^^^^^^^ @@ -488,8 +458,8 @@ Constants .. data:: CERT_NONE - Possible value for :attr:`SSLContext.verify_mode`, or the ``cert_reqs`` - parameter to :func:`wrap_socket`. Except for :const:`PROTOCOL_TLS_CLIENT`, + Possible value for :attr:`SSLContext.verify_mode`. + Except for :const:`PROTOCOL_TLS_CLIENT`, it is the default mode. With client-side sockets, just about any cert is accepted. Validation errors, such as untrusted or expired cert, are ignored and do not abort the TLS/SSL handshake. @@ -501,8 +471,8 @@ Constants .. data:: CERT_OPTIONAL - Possible value for :attr:`SSLContext.verify_mode`, or the ``cert_reqs`` - parameter to :func:`wrap_socket`. In client mode, :const:`CERT_OPTIONAL` + Possible value for :attr:`SSLContext.verify_mode`. + In client mode, :const:`CERT_OPTIONAL` has the same meaning as :const:`CERT_REQUIRED`. It is recommended to use :const:`CERT_REQUIRED` for client-side sockets instead. @@ -513,13 +483,12 @@ Constants the TLS handshake. Use of this setting requires a valid set of CA certificates to - be passed, either to :meth:`SSLContext.load_verify_locations` or as a - value of the ``ca_certs`` parameter to :func:`wrap_socket`. + be passed to :meth:`SSLContext.load_verify_locations`. .. data:: CERT_REQUIRED - Possible value for :attr:`SSLContext.verify_mode`, or the ``cert_reqs`` - parameter to :func:`wrap_socket`. In this mode, certificates are + Possible value for :attr:`SSLContext.verify_mode`. + In this mode, certificates are required from the other side of the socket connection; an :class:`SSLError` will be raised if no certificate is provided, or if its validation fails. This mode is **not** sufficient to verify a certificate in client mode as @@ -533,8 +502,7 @@ Constants the client must provide a valid and trusted certificate. Use of this setting requires a valid set of CA certificates to - be passed, either to :meth:`SSLContext.load_verify_locations` or as a - value of the ``ca_certs`` parameter to :func:`wrap_socket`. + be passed to :meth:`SSLContext.load_verify_locations`. .. class:: VerifyMode @@ -839,6 +807,22 @@ Constants .. versionadded:: 3.10 +.. data:: OP_ENABLE_KTLS + + Enable the use of the kernel TLS. 
To benefit from the feature, OpenSSL must + have been compiled with support for it, and the negotiated cipher suites and + extensions must be supported by it (a list of supported ones may vary by + platform and kernel version). + + Note that with enabled kernel TLS some cryptographic operations are + performed by the kernel directly and not via any available OpenSSL + Providers. This might be undesirable if, for example, the application + requires all cryptographic operations to be performed by the FIPS provider. + + This option is only available with OpenSSL 3.0.0 and later. + + .. versionadded:: 3.12 + .. data:: HAS_ALPN Whether the OpenSSL library has built-in support for the *Application-Layer @@ -1327,10 +1311,7 @@ SSL sockets also have the following additional methods and attributes: .. attribute:: SSLSocket.context - The :class:`SSLContext` object this SSL socket is tied to. If the SSL - socket was created using the deprecated :func:`wrap_socket` function - (rather than :meth:`SSLContext.wrap_socket`), this is a custom context - object created for this SSL socket. + The :class:`SSLContext` object this SSL socket is tied to. .. versionadded:: 3.2 @@ -2086,7 +2067,7 @@ Combined key and certificate Often the private key is stored in the same file as the certificate; in this case, only the ``certfile`` parameter to :meth:`SSLContext.load_cert_chain` -and :func:`wrap_socket` needs to be passed. If the private key is stored +needs to be passed. If the private key is stored with the certificate, it should come before the first certificate in the certificate chain:: diff --git a/Doc/library/statistics.rst b/Doc/library/statistics.rst index 88a887960edb58..f934b0e0319dca 100644 --- a/Doc/library/statistics.rst +++ b/Doc/library/statistics.rst @@ -996,6 +996,7 @@ probability that the Python room will stay within its capacity limits? >>> seed(8675309) >>> def trial(): ... return choices(('Python', 'Ruby'), (p, q), k=n).count('Python') + ... >>> mean(trial() <= k for i in range(10_000)) 0.8398 diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index 6701d794b5111b..c785336944f50a 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -353,7 +353,7 @@ Notes: The numeric literals accepted include the digits ``0`` to ``9`` or any Unicode equivalent (code points with the ``Nd`` property). - See https://www.unicode.org/Public/15.0.0/ucd/extracted/DerivedNumericType.txt + See `the Unicode Standard <https://unicode.org/Public/UNIDATA/extracted/DerivedNumericType.txt>`_ for a complete list of code points with the ``Nd`` property. @@ -1522,7 +1522,7 @@ multiple fragments. printable string representation of *object*. For string objects, this is the string itself. If *object* does not have a :meth:`~object.__str__` method, then :func:`str` falls back to returning - :meth:`repr(object) <repr>`. + :func:`repr(object) <repr>`. .. index:: single: buffer protocol; str (built-in class) @@ -1597,8 +1597,9 @@ expression support in the :mod:`re` module). lowercase, :meth:`lower` would do nothing to ``'ß'``; :meth:`casefold` converts it to ``"ss"``. - The casefolding algorithm is described in section 3.13 of the Unicode - Standard. + The casefolding algorithm is + `described in section 3.13 of the Unicode Standard + <http://www.unicode.org/versions/Unicode15.0.0/ch03.pdf#G53253>`__. .. versionadded:: 3.3 @@ -1617,6 +1618,9 @@ expression support in the :mod:`re` module). range [*start*, *end*]. Optional arguments *start* and *end* are interpreted as in slice notation. 
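
A few illustrative calls (the empty-substring case is the one covered by the note added just below)::

   >>> "hello world".count("o")
   2
   >>> "hello world".count("o", 0, 5)
   1
   >>> "hello world".count("")
   12
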
+ If *sub* is empty, returns the number of empty strings between characters + which is the length of the string plus one. + .. method:: str.encode(encoding="utf-8", errors="strict") @@ -1754,7 +1758,8 @@ expression support in the :mod:`re` module). one character, ``False`` otherwise. Alphabetic characters are those characters defined in the Unicode character database as "Letter", i.e., those with general category property being one of "Lm", "Lt", "Lu", "Ll", or "Lo". Note that this is different - from the "Alphabetic" property defined in the Unicode Standard. + from the `Alphabetic property defined in the Unicode Standard + <https://www.unicode.org/versions/Unicode15.0.0/ch04.pdf#G91002>`_. .. method:: str.isascii() @@ -1791,7 +1796,7 @@ expression support in the :mod:`re` module). Return ``True`` if the string is a valid identifier according to the language definition, section :ref:`identifiers`. - Call :func:`keyword.iskeyword` to test whether string ``s`` is a reserved + :func:`keyword.iskeyword` can be used to test whether string ``s`` is a reserved identifier, such as :keyword:`def` and :keyword:`class`. Example: @@ -1888,8 +1893,9 @@ expression support in the :mod:`re` module). Return a copy of the string with all the cased characters [4]_ converted to lowercase. - The lowercasing algorithm used is described in section 3.13 of the Unicode - Standard. + The lowercasing algorithm used is + `described in section 3.13 of the Unicode Standard + <https://www.unicode.org/versions/Unicode15.0.0/ch03.pdf#G34078>`__. .. method:: str.lstrip([chars]) @@ -2233,8 +2239,9 @@ expression support in the :mod:`re` module). character(s) is not "Lu" (Letter, uppercase), but e.g. "Lt" (Letter, titlecase). - The uppercasing algorithm used is described in section 3.13 of the Unicode - Standard. + The uppercasing algorithm used is + `described in section 3.13 of the Unicode Standard + <https://www.unicode.org/versions/Unicode15.0.0/ch03.pdf#G34078>`__. .. method:: str.zfill(width) @@ -2698,6 +2705,9 @@ arbitrary binary data. The subsequence to search for may be any :term:`bytes-like object` or an integer in the range 0 to 255. + If *sub* is empty, returns the number of empty slices between characters + which is the length of the bytes object plus one. + .. versionchanged:: 3.3 Also accept an integer in the range 0 to 255 as the subsequence. @@ -4449,6 +4459,7 @@ can be used interchangeably to index the same dictionary entry. >>> class Counter(dict): ... def __missing__(self, key): ... return 0 + ... >>> c = Counter() >>> c['red'] 0 @@ -4706,6 +4717,7 @@ An example of dictionary view usage:: >>> n = 0 >>> for val in values: ... n += val + ... >>> print(n) 504 @@ -4731,7 +4743,7 @@ An example of dictionary view usage:: >>> # get back a read-only proxy for the original dictionary >>> values.mapping - mappingproxy({'eggs': 2, 'sausage': 1, 'bacon': 1, 'spam': 500}) + mappingproxy({'bacon': 1, 'spam': 500}) >>> values.mapping['spam'] 500 @@ -5491,7 +5503,7 @@ When an operation would exceed the limit, a :exc:`ValueError` is raised: >>> _ = int('2' * 5432) Traceback (most recent call last): ... - ValueError: Exceeds the limit (4300) for integer string conversion: value has 5432 digits; use sys.set_int_max_str_digits() to increase the limit. + ValueError: Exceeds the limit (4300 digits) for integer string conversion: value has 5432 digits; use sys.set_int_max_str_digits() to increase the limit. 
>>> i = int('2' * 4300) >>> len(str(i)) 4300 @@ -5499,7 +5511,7 @@ When an operation would exceed the limit, a :exc:`ValueError` is raised: >>> len(str(i_squared)) Traceback (most recent call last): ... - ValueError: Exceeds the limit (4300) for integer string conversion: value has 8599 digits; use sys.set_int_max_str_digits() to increase the limit. + ValueError: Exceeds the limit (4300 digits) for integer string conversion: value has 8599 digits; use sys.set_int_max_str_digits() to increase the limit. >>> len(hex(i_squared)) 7144 >>> assert int(hex(i_squared), base=16) == i*i # Hexadecimal is unlimited. diff --git a/Doc/library/struct.rst b/Doc/library/struct.rst index 620f50376beb62..50d70731f77523 100644 --- a/Doc/library/struct.rst +++ b/Doc/library/struct.rst @@ -12,21 +12,25 @@ -------------- -This module performs conversions between Python values and C structs represented -as Python :class:`bytes` objects. This can be used in handling binary data -stored in files or from network connections, among other sources. It uses -:ref:`struct-format-strings` as compact descriptions of the layout of the C -structs and the intended conversion to/from Python values. +This module converts between Python values and C structs represented +as Python :class:`bytes` objects. Compact :ref:`format strings <struct-format-strings>` +describe the intended conversions to/from Python values. +The module's functions and objects can be used for two largely +distinct applications, data exchange with external sources (files or +network connections), or data transfer between the Python application +and the C layer. .. note:: - By default, the result of packing a given C struct includes pad bytes in - order to maintain proper alignment for the C types involved; similarly, - alignment is taken into account when unpacking. This behavior is chosen so - that the bytes of a packed struct correspond exactly to the layout in memory - of the corresponding C struct. To handle platform-independent data formats - or omit implicit pad bytes, use ``standard`` size and alignment instead of - ``native`` size and alignment: see :ref:`struct-alignment` for details. + When no prefix character is given, native mode is the default. It + packs or unpacks data based on the platform and compiler on which + the Python interpreter was built. + The result of packing a given C struct includes pad bytes which + maintain proper alignment for the C types involved; similarly, + alignment is taken into account when unpacking. In contrast, when + communicating data between external sources, the programmer is + responsible for defining byte ordering and padding between elements. + See :ref:`struct-alignment` for details. Several :mod:`struct` functions (and methods of :class:`Struct`) take a *buffer* argument. This refers to objects that implement the :ref:`bufferobjects` and @@ -102,10 +106,13 @@ The module defines the following exception and functions: Format Strings -------------- -Format strings are the mechanism used to specify the expected layout when -packing and unpacking data. They are built up from :ref:`format-characters`, -which specify the type of data being packed/unpacked. In addition, there are -special characters for controlling the :ref:`struct-alignment`. +Format strings describe the data layout when +packing and unpacking data. They are built up from :ref:`format characters<format-characters>`, +which specify the type of data being packed/unpacked. 
In addition, +special characters control the :ref:`byte order, size and alignment<struct-alignment>`. +Each format string consists of an optional prefix character which +describes the overall properties of the data and one or more format +characters which describe the actual data values and padding. .. _struct-alignment: @@ -116,6 +123,11 @@ Byte Order, Size, and Alignment By default, C types are represented in the machine's native format and byte order, and properly aligned by skipping pad bytes if necessary (according to the rules used by the C compiler). +This behavior is chosen so +that the bytes of a packed struct correspond exactly to the memory layout +of the corresponding C struct. +Whether to use native byte ordering +and padding or standard formats depends on the application. .. index:: single: @ (at); in struct format strings @@ -144,12 +156,10 @@ following table: If the first character is not one of these, ``'@'`` is assumed. -Native byte order is big-endian or little-endian, depending on the host -system. For example, Intel x86 and AMD64 (x86-64) are little-endian; -IBM z and most legacy architectures are big-endian; -and ARM, RISC-V and IBM Power feature switchable endianness -(bi-endian, though the former two are nearly always little-endian in practice). -Use ``sys.byteorder`` to check the endianness of your system. +Native byte order is big-endian or little-endian, depending on the +host system. For example, Intel x86, AMD64 (x86-64), and Apple M1 are +little-endian; IBM z and many legacy architectures are big-endian. +Use :data:`sys.byteorder` to check the endianness of your system. Native size and alignment are determined using the C compiler's ``sizeof`` expression. This is always combined with native byte order. @@ -231,9 +241,9 @@ platform-dependent. +--------+--------------------------+--------------------+----------------+------------+ | ``d`` | :c:expr:`double` | float | 8 | \(4) | +--------+--------------------------+--------------------+----------------+------------+ -| ``s`` | :c:expr:`char[]` | bytes | | | +| ``s`` | :c:expr:`char[]` | bytes | | \(9) | +--------+--------------------------+--------------------+----------------+------------+ -| ``p`` | :c:expr:`char[]` | bytes | | | +| ``p`` | :c:expr:`char[]` | bytes | | \(8) | +--------+--------------------------+--------------------+----------------+------------+ | ``P`` | :c:expr:`void \*` | integer | | \(5) | +--------+--------------------------+--------------------+----------------+------------+ @@ -292,8 +302,33 @@ Notes: format <half precision format_>`_ for more information. (7) - For padding, ``x`` inserts null bytes. - + When packing, ``'x'`` inserts one NUL byte. + +(8) + The ``'p'`` format character encodes a "Pascal string", meaning a short + variable-length string stored in a *fixed number of bytes*, given by the count. + The first byte stored is the length of the string, or 255, whichever is + smaller. The bytes of the string follow. If the string passed in to + :func:`pack` is too long (longer than the count minus 1), only the leading + ``count-1`` bytes of the string are stored. If the string is shorter than + ``count-1``, it is padded with null bytes so that exactly count bytes in all + are used. Note that for :func:`unpack`, the ``'p'`` format character consumes + ``count`` bytes, but that the string returned can never contain more than 255 + bytes. 
+ +(9) + For the ``'s'`` format character, the count is interpreted as the length of the + bytes, not a repeat count like for the other format characters; for example, + ``'10s'`` means a single 10-byte string mapping to or from a single + Python byte string, while ``'10c'`` means 10 + separate one byte character elements (e.g., ``cccccccccc``) mapping + to or from ten different Python byte objects. (See :ref:`struct-examples` + for a concrete demonstration of the difference.) + If a count is not given, it defaults to 1. For packing, the string is + truncated or padded with null bytes as appropriate to make it fit. For + unpacking, the resulting bytes object always has exactly the specified number + of bytes. As a special case, ``'0s'`` means a single, empty string (while + ``'0c'`` means 0 characters). A format character may be preceded by an integral repeat count. For example, the format string ``'4h'`` means exactly the same as ``'hhhh'``. @@ -301,15 +336,6 @@ the format string ``'4h'`` means exactly the same as ``'hhhh'``. Whitespace characters between formats are ignored; a count and its format must not contain whitespace though. -For the ``'s'`` format character, the count is interpreted as the length of the -bytes, not a repeat count like for the other format characters; for example, -``'10s'`` means a single 10-byte string, while ``'10c'`` means 10 characters. -If a count is not given, it defaults to 1. For packing, the string is -truncated or padded with null bytes as appropriate to make it fit. For -unpacking, the resulting bytes object always has exactly the specified number -of bytes. As a special case, ``'0s'`` means a single, empty string (while -``'0c'`` means 0 characters). - When packing a value ``x`` using one of the integer formats (``'b'``, ``'B'``, ``'h'``, ``'H'``, ``'i'``, ``'I'``, ``'l'``, ``'L'``, ``'q'``, ``'Q'``), if ``x`` is outside the valid range for that format @@ -319,17 +345,6 @@ then :exc:`struct.error` is raised. Previously, some of the integer formats wrapped out-of-range values and raised :exc:`DeprecationWarning` instead of :exc:`struct.error`. -The ``'p'`` format character encodes a "Pascal string", meaning a short -variable-length string stored in a *fixed number of bytes*, given by the count. -The first byte stored is the length of the string, or 255, whichever is -smaller. The bytes of the string follow. If the string passed in to -:func:`pack` is too long (longer than the count minus 1), only the leading -``count-1`` bytes of the string are stored. If the string is shorter than -``count-1``, it is padded with null bytes so that exactly count bytes in all -are used. Note that for :func:`unpack`, the ``'p'`` format character consumes -``count`` bytes, but that the string returned can never contain more than 255 -bytes. - .. index:: single: ? (question mark); in struct format strings For the ``'?'`` format character, the return value is either :const:`True` or @@ -345,18 +360,36 @@ Examples ^^^^^^^^ .. note:: - All examples assume a native byte order, size, and alignment with a - big-endian machine. + Native byte order examples (designated by the ``'@'`` format prefix or + lack of any prefix character) may not match what the reader's + machine produces as + that depends on the platform and compiler. 
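
Supplementing notes (8) and (9) above, a small sketch of the ``'p'`` (Pascal string) format; because only single bytes are involved, its output does not depend on byte order::

   >>> pack('5p', b'abc')              # one length byte plus data, padded to 5 bytes
   b'\x03abc\x00'
   >>> unpack('5p', b'\x03abc\x00')
   (b'abc',)
   >>> pack('3p', b'abcdef')           # too long: only count-1 bytes are stored
   b'\x02ab'
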
+ +Pack and unpack integers of three different sizes, using big endian +ordering:: -A basic example of packing/unpacking three integers:: + >>> from struct import * + >>> pack(">bhl", 1, 2, 3) + b'\x01\x00\x02\x00\x00\x00\x03' + >>> unpack('>bhl', b'\x01\x00\x02\x00\x00\x00\x03') + (1, 2, 3) + >>> calcsize('>bhl') + 7 - >>> from struct import * - >>> pack('hhl', 1, 2, 3) - b'\x00\x01\x00\x02\x00\x00\x00\x03' - >>> unpack('hhl', b'\x00\x01\x00\x02\x00\x00\x00\x03') - (1, 2, 3) - >>> calcsize('hhl') - 8 +Attempt to pack an integer which is too large for the defined field:: + + >>> pack(">h", 99999) + Traceback (most recent call last): + File "<stdin>", line 1, in <module> + struct.error: 'h' format requires -32768 <= number <= 32767 + +Demonstrate the difference between ``'s'`` and ``'c'`` format +characters:: + + >>> pack("@ccc", b'1', b'2', b'3') + b'123' + >>> pack("@3s", b'123') + b'123' Unpacked fields can be named by assigning them to variables or by wrapping the result in a named tuple:: @@ -369,35 +402,132 @@ the result in a named tuple:: >>> Student._make(unpack('<10sHHb', record)) Student(name=b'raymond   ', serialnum=4658, school=264, gradelevel=8) -The ordering of format characters may have an impact on size since the padding -needed to satisfy alignment requirements is different:: - - >>> pack('ci', b'*', 0x12131415) - b'*\x00\x00\x00\x12\x13\x14\x15' - >>> pack('ic', 0x12131415, b'*') - b'\x12\x13\x14\x15*' - >>> calcsize('ci') +The ordering of format characters may have an impact on size in native +mode since padding is implicit. In standard mode, the user is +responsible for inserting any desired padding. +Note in +the first ``pack`` call below that three NUL bytes were added after the +packed ``'#'`` to align the following integer on a four-byte boundary. +In this example, the output was produced on a little endian machine:: + + >>> pack('@ci', b'#', 0x12131415) + b'#\x00\x00\x00\x15\x14\x13\x12' + >>> pack('@ic', 0x12131415, b'#') + b'\x15\x14\x13\x12#' + >>> calcsize('@ci') 8 - >>> calcsize('ic') + >>> calcsize('@ic') 5 -The following format ``'llh0l'`` specifies two pad bytes at the end, assuming -longs are aligned on 4-byte boundaries:: +The following format ``'llh0l'`` results in two pad bytes being added +at the end, assuming the platform's longs are aligned on 4-byte boundaries:: - >>> pack('llh0l', 1, 2, 3) + >>> pack('@llh0l', 1, 2, 3) b'\x00\x00\x00\x01\x00\x00\x00\x02\x00\x03\x00\x00' -This only works when native size and alignment are in effect; standard size and -alignment does not enforce any alignment. - .. seealso:: Module :mod:`array` Packed binary storage of homogeneous data. - Module :mod:`xdrlib` - Packing and unpacking of XDR data. + Module :mod:`json` + JSON encoder and decoder. + + Module :mod:`pickle` + Python object serialization. + + +.. _applications: + +Applications +------------ + +Two main applications for the :mod:`struct` module exist, data +interchange between Python and C code within an application or another +application compiled using the same compiler (:ref:`native formats<struct-native-formats>`), and +data interchange between applications using agreed upon data layout +(:ref:`standard formats<struct-standard-formats>`). Generally speaking, the format strings +constructed for these two domains are distinct. + + +.. _struct-native-formats: + +Native Formats +^^^^^^^^^^^^^^ + +When constructing format strings which mimic native layouts, the +compiler and machine architecture determine byte ordering and padding.
+In such cases, the ``@`` format character should be used to specify +native byte ordering and data sizes. Internal pad bytes are normally inserted +automatically. It is possible that a zero-repeat format code will be +needed at the end of a format string to round up to the correct +byte boundary for proper alignment of consecutive chunks of data. + +Consider these two simple examples (on a 64-bit, little-endian +machine):: + + >>> calcsize('@lhl') + 24 + >>> calcsize('@llh') + 18 + +Data is not padded to an 8-byte boundary at the end of the second +format string without the use of extra padding. A zero-repeat format +code solves that problem:: + + >>> calcsize('@llh0l') + 24 + +The ``'x'`` format code can be used to specify the repeat, but for +native formats it is better to use a zero-repeat format like ``'0l'``. + +By default, native byte ordering and alignment is used, but it is +better to be explicit and use the ``'@'`` prefix character. + + +.. _struct-standard-formats: + +Standard Formats +^^^^^^^^^^^^^^^^ + +When exchanging data beyond your process such as networking or storage, +be precise. Specify the exact byte order, size, and alignment. Do +not assume they match the native order of a particular machine. +For example, network byte order is big-endian, while many popular CPUs +are little-endian. By defining this explicitly, the user need not +care about the specifics of the platform their code is running on. +The first character should typically be ``<`` or ``>`` +(or ``!``). Padding is the responsibility of the programmer. The +zero-repeat format character won't work. Instead, the user must +explicitly add ``'x'`` pad bytes where needed. Revisiting the +examples from the previous section, we have:: + + >>> calcsize('<qh6xq') + 24 + >>> pack('<qh6xq', 1, 2, 3) == pack('@lhl', 1, 2, 3) + True + >>> calcsize('@llh') + 18 + >>> pack('@llh', 1, 2, 3) == pack('<qqh', 1, 2, 3) + True + >>> calcsize('<qqh6x') + 24 + >>> calcsize('@llh0l') + 24 + >>> pack('@llh0l', 1, 2, 3) == pack('<qqh6x', 1, 2, 3) + True + +The above results (executed on a 64-bit machine) aren't guaranteed to +match when executed on different machines. For example, the examples +below were executed on a 32-bit machine:: + + >>> calcsize('<qqh6x') + 24 + >>> calcsize('@llh0l') + 12 + >>> pack('@llh0l', 1, 2, 3) == pack('<qqh6x', 1, 2, 3) + False .. _struct-objects: @@ -411,9 +541,9 @@ The :mod:`struct` module also defines the following type: .. class:: Struct(format) Return a new Struct object which writes and reads binary data according to - the format string *format*. Creating a Struct object once and calling its - methods is more efficient than calling the :mod:`struct` functions with the - same format since the format string only needs to be compiled once. + the format string *format*. Creating a ``Struct`` object once and calling its + methods is more efficient than calling module-level functions with the + same format since the format string is only compiled once. .. note:: diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst index 51b9e38b7b6ce9..14414ea7f81ea3 100644 --- a/Doc/library/subprocess.rst +++ b/Doc/library/subprocess.rst @@ -81,8 +81,10 @@ underlying :class:`Popen` interface can be used directly. If *env* is not ``None``, it must be a mapping that defines the environment variables for the new process; these are used instead of the default - behavior of inheriting the current process' environment. It is passed directly - to :class:`Popen`.
+ behavior of inheriting the current process' environment. It is passed + directly to :class:`Popen`. This mapping can be str to str on any platform + or bytes to bytes on POSIX platforms much like :data:`os.environ` or + :data:`os.environb`. Examples:: @@ -619,7 +621,9 @@ functions. If *env* is not ``None``, it must be a mapping that defines the environment variables for the new process; these are used instead of the default - behavior of inheriting the current process' environment. + behavior of inheriting the current process' environment. This mapping can be + str to str on any platform or bytes to bytes on POSIX platforms much like + :data:`os.environ` or :data:`os.environb`. .. note:: diff --git a/Doc/library/superseded.rst b/Doc/library/superseded.rst index 57ef9638d058d4..8786e227be9182 100644 --- a/Doc/library/superseded.rst +++ b/Doc/library/superseded.rst @@ -11,8 +11,6 @@ backwards compatibility. They have been superseded by other modules. .. toctree:: aifc.rst - asynchat.rst - asyncore.rst audioop.rst cgi.rst cgitb.rst diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst index f3fd16c4de7537..428ce51165c9b5 100644 --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -35,6 +35,15 @@ always available. can then log the event, raise an exception to abort the operation, or terminate the process entirely. + Note that audit hooks are primarily for collecting information about internal + or otherwise unobservable actions, whether by Python or libraries written in + Python. They are not suitable for implementing a "sandbox". In particular, + malicious code can trivially disable or bypass hooks added using this + function. At a minimum, any security-sensitive hooks must be added using the + C API :c:func:`PySys_AddAuditHook` before initialising the runtime, and any + modules allowing arbitrary memory modification (such as :mod:`ctypes`) should + be completely removed or closely monitored. + .. audit-event:: sys.addaudithook "" sys.addaudithook Calling :func:`sys.addaudithook` will itself raise an auditing event @@ -595,12 +604,18 @@ always available. +---------------------+----------------+--------------------------------------------------+ | :const:`radix` | FLT_RADIX | radix of exponent representation | +---------------------+----------------+--------------------------------------------------+ - | :const:`rounds` | FLT_ROUNDS | integer constant representing the rounding mode | - | | | used for arithmetic operations. This reflects | - | | | the value of the system FLT_ROUNDS macro at | - | | | interpreter startup time. See section 5.2.4.2.2 | - | | | of the C99 standard for an explanation of the | - | | | possible values and their meanings. | + | :const:`rounds` | FLT_ROUNDS | integer representing the rounding mode for | + | | | floating-point arithmetic. This reflects the | + | | | value of the system FLT_ROUNDS macro at | + | | | interpreter startup time: | + | | | ``-1`` indeterminable, | + | | | ``0`` toward zero, | + | | | ``1`` to nearest, | + | | | ``2`` toward positive infinity, | + | | | ``3`` toward negative infinity | + | | | | + | | | All other values for FLT_ROUNDS characterize | + | | | implementation-defined rounding behavior. | +---------------------+----------------+--------------------------------------------------+ The attribute :attr:`sys.float_info.dig` needs further explanation. 
If diff --git a/Doc/library/sysconfig.rst b/Doc/library/sysconfig.rst index 024988777030f8..839c2c015b49ae 100644 --- a/Doc/library/sysconfig.rst +++ b/Doc/library/sysconfig.rst @@ -121,7 +121,7 @@ identifier. Python currently uses eight paths: Return the default scheme name for the current platform. - .. versionchanged:: 3.10 + .. versionadded:: 3.10 This function was previously named ``_get_default_scheme()`` and considered an implementation detail. diff --git a/Doc/library/syslog.rst b/Doc/library/syslog.rst index 766ff57cc66d69..f29ef03267b1ba 100644 --- a/Doc/library/syslog.rst +++ b/Doc/library/syslog.rst @@ -40,6 +40,13 @@ The module defines the following functions: it wasn't called prior to the call to :func:`syslog`, deferring to the syslog implementation to call ``openlog()``. + .. versionchanged:: 3.12 + This function is restricted in subinterpreters. + (Only code that runs in multiple interpreters is affected and + the restriction is not relevant for most users.) + :func:`openlog` must be called in the main interpreter before :func:`syslog` may be used + in a subinterpreter. Otherwise it will raise :exc:`RuntimeError`. + .. function:: openlog([ident[, logoption[, facility]]]) @@ -60,6 +67,13 @@ The module defines the following functions: In previous versions, keyword arguments were not allowed, and *ident* was required. + .. versionchanged:: 3.12 + This function is restricted in subinterpreters. + (Only code that runs in multiple interpreters is affected and + the restriction is not relevant for most users.) + This may only be called in the main interpreter. + It will raise :exc:`RuntimeError` if called in a subinterpreter. + .. function:: closelog() @@ -72,6 +86,13 @@ The module defines the following functions: .. audit-event:: syslog.closelog "" syslog.closelog + .. versionchanged:: 3.12 + This function is restricted in subinterpreters. + (Only code that runs in multiple interpreters is affected and + the restriction is not relevant for most users.) + This may only be called in the main interpreter. + It will raise :exc:`RuntimeError` if called in a subinterpreter. + .. function:: setlogmask(maskpri) diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst index 8c8286059d531e..356f919a1897b2 100644 --- a/Doc/library/typing.rst +++ b/Doc/library/typing.rst @@ -1339,7 +1339,7 @@ These are not used in annotations. They are building blocks for creating generic ``Unpack[Ts]``.) Type variable tuples must *always* be unpacked. This helps distinguish type - variable types from normal type variables:: + variable tuples from normal type variables:: x: Ts # Not valid x: tuple[Ts] # Not valid @@ -2009,7 +2009,7 @@ Other concrete types represent the types of I/O streams such as returned by :func:`open`. - .. deprecated-removed:: 3.8 3.12 + .. deprecated-removed:: 3.8 3.13 The ``typing.io`` namespace is deprecated and will be removed. These types should be directly imported from ``typing`` instead. @@ -2023,7 +2023,7 @@ Other concrete types ``Pattern[str]``, ``Pattern[bytes]``, ``Match[str]``, or ``Match[bytes]``. - .. deprecated-removed:: 3.8 3.12 + .. deprecated-removed:: 3.8 3.13 The ``typing.re`` namespace is deprecated and will be removed. These types should be directly imported from ``typing`` instead. @@ -2575,6 +2575,10 @@ Functions and decorators assumed to be True or False if it is omitted by the caller. * ``kw_only_default`` indicates whether the ``kw_only`` parameter is assumed to be True or False if it is omitted by the caller. 
+ * ``frozen_default`` indicates whether the ``frozen`` parameter is + assumed to be True or False if it is omitted by the caller. + + .. versionadded:: 3.12 * ``field_specifiers`` specifies a static list of supported classes or functions that describe fields, similar to ``dataclasses.field()``. * Arbitrary other keyword arguments are accepted in order to allow for @@ -2868,7 +2872,7 @@ convenience. This is subject to change, and not all deprecations are listed. +----------------------------------+---------------+-------------------+----------------+ | Feature | Deprecated in | Projected removal | PEP/issue | +==================================+===============+===================+================+ -| ``typing.io`` and ``typing.re`` | 3.8 | 3.12 | :issue:`38291` | +| ``typing.io`` and ``typing.re`` | 3.8 | 3.13 | :issue:`38291` | | submodules | | | | +----------------------------------+---------------+-------------------+----------------+ | ``typing`` versions of standard | 3.9 | Undecided | :pep:`585` | diff --git a/Doc/library/unittest.mock.rst b/Doc/library/unittest.mock.rst index b768557e6075f6..e009f303fef317 100644 --- a/Doc/library/unittest.mock.rst +++ b/Doc/library/unittest.mock.rst @@ -1604,6 +1604,7 @@ decorator: >>> @patch.dict(foo, {'newkey': 'newvalue'}) ... def test(): ... assert foo == {'newkey': 'newvalue'} + ... >>> test() >>> assert foo == {} diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst index adc6cd339ac157..2a41096de006b8 100644 --- a/Doc/library/venv.rst +++ b/Doc/library/venv.rst @@ -497,76 +497,68 @@ subclass which installs setuptools and pip into a created virtual environment:: url = 'https://bootstrap.pypa.io/get-pip.py' self.install_script(context, 'pip', url) + def main(args=None): - compatible = True - if sys.version_info < (3, 3): - compatible = False - elif not hasattr(sys, 'base_prefix'): - compatible = False - if not compatible: - raise ValueError('This script is only for use with ' - 'Python 3.3 or later') + import argparse + + parser = argparse.ArgumentParser(prog=__name__, + description='Creates virtual Python ' + 'environments in one or ' + 'more target ' + 'directories.') + parser.add_argument('dirs', metavar='ENV_DIR', nargs='+', + help='A directory in which to create the ' + 'virtual environment.') + parser.add_argument('--no-setuptools', default=False, + action='store_true', dest='nodist', + help="Don't install setuptools or pip in the " + "virtual environment.") + parser.add_argument('--no-pip', default=False, + action='store_true', dest='nopip', + help="Don't install pip in the virtual " + "environment.") + parser.add_argument('--system-site-packages', default=False, + action='store_true', dest='system_site', + help='Give the virtual environment access to the ' + 'system site-packages dir.') + if os.name == 'nt': + use_symlinks = False else: - import argparse - - parser = argparse.ArgumentParser(prog=__name__, - description='Creates virtual Python ' - 'environments in one or ' - 'more target ' - 'directories.') - parser.add_argument('dirs', metavar='ENV_DIR', nargs='+', - help='A directory in which to create the ' - 'virtual environment.') - parser.add_argument('--no-setuptools', default=False, - action='store_true', dest='nodist', - help="Don't install setuptools or pip in the " - "virtual environment.") - parser.add_argument('--no-pip', default=False, - action='store_true', dest='nopip', - help="Don't install pip in the virtual " - "environment.") - parser.add_argument('--system-site-packages', default=False, - action='store_true', 
dest='system_site', - help='Give the virtual environment access to the ' - 'system site-packages dir.') - if os.name == 'nt': - use_symlinks = False - else: - use_symlinks = True - parser.add_argument('--symlinks', default=use_symlinks, - action='store_true', dest='symlinks', - help='Try to use symlinks rather than copies, ' - 'when symlinks are not the default for ' - 'the platform.') - parser.add_argument('--clear', default=False, action='store_true', - dest='clear', help='Delete the contents of the ' - 'virtual environment ' - 'directory if it already ' - 'exists, before virtual ' - 'environment creation.') - parser.add_argument('--upgrade', default=False, action='store_true', - dest='upgrade', help='Upgrade the virtual ' - 'environment directory to ' - 'use this version of ' - 'Python, assuming Python ' - 'has been upgraded ' - 'in-place.') - parser.add_argument('--verbose', default=False, action='store_true', - dest='verbose', help='Display the output ' - 'from the scripts which ' - 'install setuptools and pip.') - options = parser.parse_args(args) - if options.upgrade and options.clear: - raise ValueError('you cannot supply --upgrade and --clear together.') - builder = ExtendedEnvBuilder(system_site_packages=options.system_site, - clear=options.clear, - symlinks=options.symlinks, - upgrade=options.upgrade, - nodist=options.nodist, - nopip=options.nopip, - verbose=options.verbose) - for d in options.dirs: - builder.create(d) + use_symlinks = True + parser.add_argument('--symlinks', default=use_symlinks, + action='store_true', dest='symlinks', + help='Try to use symlinks rather than copies, ' + 'when symlinks are not the default for ' + 'the platform.') + parser.add_argument('--clear', default=False, action='store_true', + dest='clear', help='Delete the contents of the ' + 'virtual environment ' + 'directory if it already ' + 'exists, before virtual ' + 'environment creation.') + parser.add_argument('--upgrade', default=False, action='store_true', + dest='upgrade', help='Upgrade the virtual ' + 'environment directory to ' + 'use this version of ' + 'Python, assuming Python ' + 'has been upgraded ' + 'in-place.') + parser.add_argument('--verbose', default=False, action='store_true', + dest='verbose', help='Display the output ' + 'from the scripts which ' + 'install setuptools and pip.') + options = parser.parse_args(args) + if options.upgrade and options.clear: + raise ValueError('you cannot supply --upgrade and --clear together.') + builder = ExtendedEnvBuilder(system_site_packages=options.system_site, + clear=options.clear, + symlinks=options.symlinks, + upgrade=options.upgrade, + nodist=options.nodist, + nopip=options.nopip, + verbose=options.verbose) + for d in options.dirs: + builder.create(d) if __name__ == '__main__': rc = 1 diff --git a/Doc/library/weakref.rst b/Doc/library/weakref.rst index a1e542b1e927e4..73e7b21ae405d2 100644 --- a/Doc/library/weakref.rst +++ b/Doc/library/weakref.rst @@ -143,7 +143,7 @@ See :ref:`__slots__ documentation <slots>` for details. ``ProxyType`` or ``CallableProxyType``, depending on whether *object* is callable. Proxy objects are not :term:`hashable` regardless of the referent; this avoids a number of problems related to their fundamentally mutable nature, and - prevent their use as dictionary keys. *callback* is the same as the parameter + prevents their use as dictionary keys. *callback* is the same as the parameter of the same name to the :func:`ref` function. 
Accessing an attribute of the proxy object after the referent is @@ -212,7 +212,7 @@ objects. discarded when no strong reference to it exists any more. -.. class:: WeakMethod(method) +.. class:: WeakMethod(method[, callback]) A custom :class:`ref` subclass which simulates a weak reference to a bound method (i.e., a method defined on a class and looked up on an instance). @@ -238,6 +238,8 @@ objects. >>> r() >>> + *callback* is the same as the parameter of the same name to the :func:`ref` function. + .. versionadded:: 3.4 .. class:: finalize(obj, func, /, *args, **kwargs) diff --git a/Doc/library/wsgiref.rst b/Doc/library/wsgiref.rst index 06223e667a450a..75dea466335163 100644 --- a/Doc/library/wsgiref.rst +++ b/Doc/library/wsgiref.rst @@ -7,6 +7,8 @@ .. moduleauthor:: Phillip J. Eby <pje@telecommunity.com> .. sectionauthor:: Phillip J. Eby <pje@telecommunity.com> +**Source code:** :source:`Lib/wsgiref` + -------------- The Web Server Gateway Interface (WSGI) is a standard interface between web diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst index 2fe0d2e082fb3a..876de29b17ca3c 100644 --- a/Doc/library/xml.etree.elementtree.rst +++ b/Doc/library/xml.etree.elementtree.rst @@ -1212,6 +1212,7 @@ Example of changing the attribute "target" of every link in first paragraph:: [<Element 'a' at 0xb77ec2ac>, <Element 'a' at 0xb77ec1cc>] >>> for i in links: # Iterates through all found links ... i.attrib["target"] = "blank" + ... >>> tree.write("output.xhtml") .. _elementtree-qname-objects: diff --git a/Doc/library/zipfile.rst b/Doc/library/zipfile.rst index 4dd9fa961a8d98..82709dbc92496d 100644 --- a/Doc/library/zipfile.rst +++ b/Doc/library/zipfile.rst @@ -672,6 +672,7 @@ The :class:`PyZipFile` constructor takes the same parameters as the >>> def notests(s): ... fn = os.path.basename(s) ... return (not (fn == 'test' or fn.startswith('test_'))) + ... >>> zf.writepy('myprog', filterfunc=notests) The :meth:`writepy` method makes archives with file names like diff --git a/Doc/library/zoneinfo.rst b/Doc/library/zoneinfo.rst index 2f1879dc056a88..d2e5619e7e47c2 100644 --- a/Doc/library/zoneinfo.rst +++ b/Doc/library/zoneinfo.rst @@ -9,6 +9,8 @@ .. moduleauthor:: Paul Ganssle <paul@ganssle.io> .. sectionauthor:: Paul Ganssle <paul@ganssle.io> +**Source code:** :source:`Lib/zoneinfo` + -------------- The :mod:`zoneinfo` module provides a concrete time zone implementation to diff --git a/Doc/license.rst b/Doc/license.rst index 92059b91effd9b..a934c60698f0b3 100644 --- a/Doc/license.rst +++ b/Doc/license.rst @@ -387,7 +387,8 @@ Project, https://www.wide.ad.jp/. :: Asynchronous socket services ---------------------------- -The :mod:`asynchat` and :mod:`asyncore` modules contain the following notice:: +The :mod:`test.support.asynchat` and :mod:`test.support.asyncore` +modules contain the following notice:: Copyright 1996 by Sam Rushing @@ -988,9 +989,12 @@ https://www.w3.org/TR/xml-c14n2-testcases/ and is distributed under the (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ Audioop ------- + The audioop module uses the code base in g771.c file of the SoX project:: + Programming the AdLib/Sound Blaster FM Music Chips Version 2.0 (24 Feb 1992) diff --git a/Doc/reference/compound_stmts.rst b/Doc/reference/compound_stmts.rst index c3c78119958e79..9e09515f50d12f 100644 --- a/Doc/reference/compound_stmts.rst +++ b/Doc/reference/compound_stmts.rst @@ -593,6 +593,7 @@ The :keyword:`!match` statement keyword: if keyword: as pair: match; case + single: as; match statement single: : (colon); compound statement .. versionadded:: 3.10 diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst index 301f41f3952c96..fd682fcff02003 100644 --- a/Doc/reference/datamodel.rst +++ b/Doc/reference/datamodel.rst @@ -2823,7 +2823,7 @@ Customizing positional arguments in class pattern matching When using a class name in a pattern, positional arguments in the pattern are not allowed by default, i.e. ``case MyClass(x, y)`` is typically invalid without special -support in ``MyClass``. To be able to use that kind of patterns, the class needs to +support in ``MyClass``. To be able to use that kind of pattern, the class needs to define a *__match_args__* attribute. .. data:: object.__match_args__ diff --git a/Doc/reference/grammar.rst b/Doc/reference/grammar.rst index 59b45005836a76..bc1db7b039cd5a 100644 --- a/Doc/reference/grammar.rst +++ b/Doc/reference/grammar.rst @@ -12,7 +12,7 @@ and `PEG <https://en.wikipedia.org/wiki/Parsing_expression_grammar>`_. In particular, ``&`` followed by a symbol, token or parenthesized group indicates a positive lookahead (i.e., is required to match but not consumed), while ``!`` indicates a negative lookahead (i.e., is -required _not_ to match). We use the ``|`` separator to mean PEG's +required *not* to match). We use the ``|`` separator to mean PEG's "ordered choice" (written as ``/`` in traditional PEG grammars). See :pep:`617` for more details on the grammar's syntax. diff --git a/Doc/reference/lexical_analysis.rst b/Doc/reference/lexical_analysis.rst index 4ab6e90a623449..8adb4b740825d0 100644 --- a/Doc/reference/lexical_analysis.rst +++ b/Doc/reference/lexical_analysis.rst @@ -612,9 +612,13 @@ Notes: As in Standard C, up to three octal digits are accepted. .. versionchanged:: 3.11 - Octal escapes with value larger than ``0o377`` produce a :exc:`DeprecationWarning`. - In a future Python version they will be a :exc:`SyntaxWarning` and - eventually a :exc:`SyntaxError`. + Octal escapes with value larger than ``0o377`` produce a + :exc:`DeprecationWarning`. + + .. versionchanged:: 3.12 + Octal escapes with value larger than ``0o377`` produce a + :exc:`SyntaxWarning`. In a future Python version they will be eventually + a :exc:`SyntaxError`. (3) Unlike in Standard C, exactly two hex digits are required. @@ -646,9 +650,11 @@ escape sequences only recognized in string literals fall into the category of unrecognized escapes for bytes literals. .. versionchanged:: 3.6 - Unrecognized escape sequences produce a :exc:`DeprecationWarning`. In - a future Python version they will be a :exc:`SyntaxWarning` and - eventually a :exc:`SyntaxError`. + Unrecognized escape sequences produce a :exc:`DeprecationWarning`. + + .. versionchanged:: 3.12 + Unrecognized escape sequences produce a :exc:`SyntaxWarning`. In a future + Python version they will be eventually a :exc:`SyntaxError`. 
Even in a raw literal, quotes can be escaped with a backslash, but the backslash remains in the result; for example, ``r"\""`` is a valid string diff --git a/Doc/reference/simple_stmts.rst b/Doc/reference/simple_stmts.rst index 5c9937fb5b6d72..c98ac81e415b72 100644 --- a/Doc/reference/simple_stmts.rst +++ b/Doc/reference/simple_stmts.rst @@ -330,7 +330,7 @@ statement, of a variable or attribute annotation and an optional assignment stat annotated_assignment_stmt: `augtarget` ":" `expression` : ["=" (`starred_expression` | `yield_expression`)] -The difference from normal :ref:`assignment` is that only single target is allowed. +The difference from normal :ref:`assignment` is that only a single target is allowed. For simple names as assignment targets, if in class or module scope, the annotations are evaluated and stored in a special class or module @@ -365,8 +365,8 @@ target, then the interpreter evaluates the target except for the last IDEs. .. versionchanged:: 3.8 - Now annotated assignments allow same expressions in the right hand side as - the regular assignments. Previously, some expressions (like un-parenthesized + Now annotated assignments allow the same expressions in the right hand side as + regular assignments. Previously, some expressions (like un-parenthesized tuple expressions) caused a syntax error. @@ -756,7 +756,7 @@ commas) the two steps are carried out separately for each clause, just as though the clauses had been separated out into individual import statements. -The details of the first step, finding and loading modules are described in +The details of the first step, finding and loading modules, are described in greater detail in the section on the :ref:`import system <importsystem>`, which also describes the various types of packages and modules that can be imported, as well as all the hooks that can be used to customize diff --git a/Doc/requirements.txt b/Doc/requirements.txt index 958665db69e227..134f39d6d7b3d4 100644 --- a/Doc/requirements.txt +++ b/Doc/requirements.txt @@ -8,6 +8,7 @@ sphinx==4.5.0 blurb sphinx-lint==0.6.7 +sphinxext-opengraph>=0.7.1 # The theme used by the documentation is stored separately, so we need # to install that as well. 
diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py index 3b9f7442f75bcc..db7bb3b44219d2 100644 --- a/Doc/tools/extensions/pyspecific.py +++ b/Doc/tools/extensions/pyspecific.py @@ -26,7 +26,7 @@ from sphinx.errors import NoUri except ImportError: from sphinx.environment import NoUri -from sphinx.locale import translators +from sphinx.locale import _ as sphinx_gettext from sphinx.util import status_iterator, logging from sphinx.util.nodes import split_explicit_title from sphinx.writers.text import TextWriter, TextTranslator @@ -104,7 +104,7 @@ class ImplementationDetail(Directive): def run(self): self.assert_has_content() pnode = nodes.compound(classes=['impl-detail']) - label = translators['sphinx'].gettext(self.label_text) + label = sphinx_gettext(self.label_text) content = self.content add_text = nodes.strong(label, label) self.state.nested_parse(content, self.content_offset, pnode) @@ -252,7 +252,7 @@ def run(self): else: args = [] - label = translators['sphinx'].gettext(self._label[min(2, len(args))]) + label = sphinx_gettext(self._label[min(2, len(args))]) text = label.format(name="``{}``".format(name), args=", ".join("``{}``".format(a) for a in args if a)) @@ -431,7 +431,7 @@ def run(self): else: label = self._removed_label - label = translators['sphinx'].gettext(label) + label = sphinx_gettext(label) text = label.format(deprecated=self.arguments[0], removed=self.arguments[1]) if len(self.arguments) == 3: inodes, messages = self.state.inline_text(self.arguments[2], diff --git a/Doc/tools/templates/layout.html b/Doc/tools/templates/layout.html index 98ccf4224804b2..460161cd320223 100644 --- a/Doc/tools/templates/layout.html +++ b/Doc/tools/templates/layout.html @@ -8,6 +8,19 @@ <a href="/3/{{ pagename }}{{ file_suffix }}">{% trans %} Python documentation for the current stable release{% endtrans %}</a>. </div> {%- endif %} + +{%- if is_deployment_preview %} +<div id="deployment-preview-warning" style="padding: .5em; text-align: center; background-color: #fff2ba; color: #6a580e;"> + <div style="float: right; margin-top: -10px; margin-left: 10px;"> + <a href="https://www.netlify.com"> + <img src="https://www.netlify.com/img/global/badges/netlify-color-accent.svg" alt="Deploys by Netlify" /> + </a> + </div> + {% trans %}This is a deploy preview created from a <a href="{{ repository_url }}/pull/{{ pr_id }}">pull request</a>. + For authoritative documentation, see the {% endtrans %} + <a href="https://docs.python.org/3/{{ pagename }}{{ file_suffix }}">{% trans %} the current stable release{% endtrans %}</a>. +</div> +{%- endif %} {% endblock %} {% block rootrellink %} diff --git a/Doc/tutorial/classes.rst b/Doc/tutorial/classes.rst index 9ecbf8b87efbf1..0e5a9402bc50e3 100644 --- a/Doc/tutorial/classes.rst +++ b/Doc/tutorial/classes.rst @@ -119,12 +119,12 @@ directly accessible: * the innermost scope, which is searched first, contains the local names * the scopes of any enclosing functions, which are searched starting with the - nearest enclosing scope, contains non-local, but also non-global names + nearest enclosing scope, contain non-local, but also non-global names * the next-to-last scope contains the current module's global names * the outermost scope (searched last) is the namespace containing built-in names If a name is declared global, then all references and assignments go directly to -the middle scope containing the module's global names. To rebind variables +the next-to-last scope containing the module's global names. 
To rebind variables found outside of the innermost scope, the :keyword:`nonlocal` statement can be used; if not declared nonlocal, those variables are read-only (an attempt to write to such a variable will simply create a *new* local variable in the diff --git a/Doc/tutorial/datastructures.rst b/Doc/tutorial/datastructures.rst index 12b00be3793e14..c8e89d9b79bddd 100644 --- a/Doc/tutorial/datastructures.rst +++ b/Doc/tutorial/datastructures.rst @@ -122,7 +122,7 @@ An example that uses most of the list methods:: You might have noticed that methods like ``insert``, ``remove`` or ``sort`` that only modify the list have no return value printed -- they return the default -``None``. [1]_ This is a design principle for all mutable data structures in +``None``. [#]_ This is a design principle for all mutable data structures in Python. Another thing you might notice is that not all data can be sorted or @@ -731,5 +731,5 @@ interpreter will raise a :exc:`TypeError` exception. .. rubric:: Footnotes -.. [1] Other languages may return the mutated object, which allows method +.. [#] Other languages may return the mutated object, which allows method chaining, such as ``d->insert("a")->remove("b")->sort();``. diff --git a/Doc/tutorial/errors.rst b/Doc/tutorial/errors.rst index 67bb19556681c0..e09c829b8e9721 100644 --- a/Doc/tutorial/errors.rst +++ b/Doc/tutorial/errors.rst @@ -496,7 +496,7 @@ Raising and Handling Multiple Unrelated Exceptions ================================================== There are situations where it is necessary to report several exceptions that -have occurred. This it often the case in concurrency frameworks, when several +have occurred. This is often the case in concurrency frameworks, when several tasks may have failed in parallel, but there are also other use cases where it is desirable to continue execution and collect multiple errors rather than raise the first exception. diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst index b99d9bdaa3f007..db4bf7412292bc 100644 --- a/Doc/using/configure.rst +++ b/Doc/using/configure.rst @@ -2,6 +2,47 @@ Configure Python **************** +Build Requirements +================== + +Features required to build CPython: + +* A `C11 <https://en.cppreference.com/w/c/11>`_ compiler. `Optional C11 + features + <https://en.wikipedia.org/wiki/C11_(C_standard_revision)#Optional_features>`_ + are not required. + +* Support for `IEEE 754 <https://en.wikipedia.org/wiki/IEEE_754>`_ floating + point numbers and `floating point Not-a-Number (NaN) + <https://en.wikipedia.org/wiki/NaN#Floating_point>`_. + +* Support for threads. + +* OpenSSL 1.1.1 or newer for the :mod:`ssl` and :mod:`hashlib` modules. + +* On Windows, Microsoft Visual Studio 2017 or later is required. + +.. versionchanged:: 3.11 + C11 compiler, IEEE 754 and NaN support are now required. + On Windows, Visual Studio 2017 or later is required. + +.. versionchanged:: 3.10 + OpenSSL 1.1.1 is now required. + +.. versionchanged:: 3.7 + Thread support and OpenSSL 1.0.2 are now required. + +.. versionchanged:: 3.6 + Selected C99 features are now required, like ``<stdint.h>`` and ``static + inline`` functions. + +.. versionchanged:: 3.5 + On Windows, Visual Studio 2015 or later is required. + +See also :pep:`7` "Style Guide for C Code" and :pep:`11` "CPython platform +support". + + .. _configure-options: Configure Options @@ -93,6 +134,12 @@ General Options See :envvar:`PYTHONCOERCECLOCALE` and the :pep:`538`. +.. 
cmdoption:: --without-freelists + + Disable all freelists except the empty tuple singleton. + + .. versionadded:: 3.11 + .. cmdoption:: --with-platlibdir=DIRNAME Python library directory name (default is ``lib``). @@ -131,7 +178,8 @@ General Options Turn on internal statistics gathering. The statistics will be dumped to a arbitrary (probably unique) file in - ``/tmp/py_stats/``, or ``C:\temp\py_stats\`` on Windows. + ``/tmp/py_stats/``, or ``C:\temp\py_stats\`` on Windows. If that directory + does not exist, results will be printed on stdout. Use ``Tools/scripts/summarize_stats.py`` to read the stats. @@ -766,6 +814,13 @@ Compiler flags .. versionadded:: 3.5 +.. envvar:: COMPILEALL_OPTS + + Options passed to the :mod:`compileall` command line when building PYC files + in ``make install``. Default: ``-j0``. + + .. versionadded:: 3.12 + .. envvar:: EXTRA_CFLAGS Extra C compiler flags. diff --git a/Doc/whatsnew/2.7.rst b/Doc/whatsnew/2.7.rst index 276ab63b97f8a9..810a2cd2537c34 100644 --- a/Doc/whatsnew/2.7.rst +++ b/Doc/whatsnew/2.7.rst @@ -1331,6 +1331,7 @@ changes, or look through the Subversion logs for all the details. >>> from inspect import getcallargs >>> def f(a, b=1, *pos, **named): ... pass + ... >>> getcallargs(f, 1, 2, 3) {'a': 1, 'b': 2, 'pos': (3,), 'named': {}} >>> getcallargs(f, a=2, x=4) diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index 2389b1a63b1ca2..1c21caf355f082 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -77,8 +77,9 @@ Interpreter improvements: New typing features: * :pep:`604`, Allow writing union types as X | Y -* :pep:`613`, Explicit Type Aliases * :pep:`612`, Parameter Specification Variables +* :pep:`613`, Explicit Type Aliases +* :pep:`647`, User-Defined Type Guards Important deprecations, removals or restrictions: diff --git a/Doc/whatsnew/3.11.rst b/Doc/whatsnew/3.11.rst index 6eb90df89cac0e..7931988ed0b04d 100644 --- a/Doc/whatsnew/3.11.rst +++ b/Doc/whatsnew/3.11.rst @@ -2131,7 +2131,7 @@ Build Changes (Contributed by Dong-hee Na and Brett Holman in :issue:`44340`.) * Freelists for object structs can now be disabled. A new :program:`configure` - option :option:`!--without-freelists` can be used to disable all freelists + option :option:`--without-freelists` can be used to disable all freelists except empty tuple singleton. (Contributed by Christian Heimes in :issue:`45522`.) diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst index 18751f5a8c0b85..73dc462f0b3303 100644 --- a/Doc/whatsnew/3.12.rst +++ b/Doc/whatsnew/3.12.rst @@ -70,6 +70,60 @@ Important deprecations, removals or restrictions: * :pep:`623`, Remove wstr from Unicode +* :pep:`632`, Remove the ``distutils`` package. + +Improved Error Messages +======================= + +* Modules from the standard library are now potentially suggested as part of + the error messages displayed by the interpreter when a :exc:`NameError` is + raised to the top level. Contributed by Pablo Galindo in :gh:`98254`. + + >>> sys.version_info + Traceback (most recent call last): + File "<stdin>", line 1, in <module> + NameError: name 'sys' is not defined. Did you forget to import 'sys'? + +* Improve the error suggestion for :exc:`NameError` exceptions for instances. + Now if a :exc:`NameError` is raised in a method and the instance has an + attribute that's exactly equal to the name in the exception, the suggestion + will include ``self.<NAME>`` instead of the closest match in the method + scope. Contributed by Pablo Galindo in :gh:`99139`. + + >>> class A: + ... 
def __init__(self): + ... self.blech = 1 + ... + ... def foo(self): + ... somethin = blech + + >>> A().foo() + Traceback (most recent call last): + File "<stdin>", line 1 + somethin = blech + ^^^^^ + NameError: name 'blech' is not defined. Did you mean: 'self.blech'? + + +* Improve the :exc:`SyntaxError` error message when the user types ``import x + from y`` instead of ``from y import x``. Contributed by Pablo Galindo in :gh:`98931`. + + >>> import a.y.z from b.y.z + Traceback (most recent call last): + File "<stdin>", line 1 + import a.y.z from b.y.z + ^^^^^^^^^^^^^^^^^^^^^^^ + SyntaxError: Did you mean to use 'from ... import ...' instead? + +* :exc:`ImportError` exceptions raised from failed ``from <module> import + <name>`` statements now include suggestions for the value of ``<name>`` based on the + available names in ``<module>``. Contributed by Pablo Galindo in :gh:`91058`. + + >>> from collections import chainmap + Traceback (most recent call last): + File "<stdin>", line 1, in <module> + ImportError: cannot import name 'chainmap' from 'collections'. Did you mean: 'ChainMap'? + New Features ============ @@ -109,6 +163,26 @@ Other Language Changes chance to execute the GC periodically. (Contributed by Pablo Galindo in :gh:`97922`.) +* A backslash-character pair that is not a valid escape sequence now generates + a :exc:`SyntaxWarning`, instead of :exc:`DeprecationWarning`. + For example, ``re.compile("\d+\.\d+")`` now emits a :exc:`SyntaxWarning` + (``"\d"`` is an invalid escape sequence), use raw strings for regular + expression: ``re.compile(r"\d+\.\d+")``. + In a future Python version, :exc:`SyntaxError` will eventually be raised, + instead of :exc:`SyntaxWarning`. + (Contributed by Victor Stinner in :gh:`98401`.) + +* Octal escapes with value larger than ``0o377`` (ex: ``"\477"``), deprecated + in Python 3.11, now produce a :exc:`SyntaxWarning`, instead of + :exc:`DeprecationWarning`. + In a future Python version they will be eventually a :exc:`SyntaxError`. + (Contributed by Victor Stinner in :gh:`98401`.) + +* All builtin and extension callables expecting boolean parameters now accept + arguments of any type instead of just :class:`bool` and :class:`int`. + (Contributed by Serhiy Storchaka in :gh:`60203`.) + + New Modules =========== @@ -133,8 +207,8 @@ asyncio (Contributed by Kumar Aditya in :gh:`98024`.) * The child watcher classes :class:`~asyncio.MultiLoopChildWatcher`, - :class:`~asyncio.FastChildWatcher` and - :class:`~asyncio.SafeChildWatcher` are deprecated and + :class:`~asyncio.FastChildWatcher`, :class:`~asyncio.AbstractChildWatcher` + and :class:`~asyncio.SafeChildWatcher` are deprecated and will be removed in Python 3.14. It is recommended to not manually configure a child watcher as the event loop now uses the best available child watcher for each platform (:class:`~asyncio.PidfdChildWatcher` @@ -147,6 +221,11 @@ asyncio and will be removed in Python 3.14. (Contributed by Kumar Aditya in :gh:`94597`.) +* Add *loop_factory* parameter to :func:`asyncio.run` to allow specifying + a custom event loop factory. + (Contributed by Kumar Aditya in :gh:`99388`.) + + pathlib ------- @@ -159,6 +238,10 @@ pathlib more consistent with :func:`os.path.relpath`. (Contributed by Domenico Ragusa in :issue:`40358`.) +* Add :meth:`pathlib.Path.is_junction` as a proxy to :func:`os.path.isjunction`. + (Contributed by Charles Machalow in :gh:`99547`.) + + dis --- @@ -177,6 +260,14 @@ os for a process with :func:`os.pidfd_open` in non-blocking mode. 
(Contributed by Kumar Aditya in :gh:`93312`.) +* Add :func:`os.path.isjunction` to check if a given path is a junction. + (Contributed by Charles Machalow in :gh:`99547`.) + +* :class:`os.DirEntry` now includes an :meth:`os.DirEntry.is_junction` + method to check if the entry is a junction. + (Contributed by Charles Machalow in :gh:`99547`.) + + shutil ------ @@ -193,6 +284,13 @@ sqlite3 * Add a :ref:`command-line interface <sqlite3-cli>`. (Contributed by Erlend E. Aasland in :gh:`77617`.) +* Add the :attr:`~sqlite3.Connection.autocommit` attribute + to :class:`~sqlite3.Connection` + and the *autocommit* parameter to :func:`~sqlite3.connect` + to control :pep:`249`-compliant + :ref:`transaction handling <sqlite3-transaction-control-autocommit>`. + (Contributed by Erlend E. Aasland in :gh:`83638`.) + threading --------- @@ -373,6 +471,12 @@ although there is currently no date scheduled for their removal. Removed ======= +* Remove the ``distutils`` package. It was deprecated in Python 3.10 by + :pep:`632` "Deprecate distutils module". For projects still using + ``distutils`` and cannot be updated to something else, the ``setuptools`` + project can be installed: it still provides ``distutils``. + (Contributed by Victor Stinner in :gh:`92584`.) + * Removed many old deprecated :mod:`unittest` features: - A number of :class:`~unittest.TestCase` method aliases: @@ -446,6 +550,12 @@ Removed .. _aiosmtpd: https://pypi.org/project/aiosmtpd/ +* ``asynchat`` and ``asyncore`` have been removed + according to the schedule in :pep:`594`, + having been deprecated in Python 3.6. + Use :mod:`asyncio` instead. + (Contributed by Nikita Sobolev in :gh:`96580`.) + * Remove ``io.OpenWrapper`` and ``_pyio.OpenWrapper``, deprecated in Python 3.10: just use :func:`open` instead. The :func:`open` (:func:`io.open`) function is a built-in function. Since Python 3.10, :func:`_pyio.open` is @@ -512,6 +622,16 @@ Removed <https://github.com/sphinx-contrib/sphinx-lint>`_. (Contributed by Julien Palard in :gh:`98179`.) +* Remove the *keyfile*, *certfile* and *check_hostname* parameters, deprecated + since Python 3.6, in modules: :mod:`ftplib`, :mod:`http.client`, + :mod:`imaplib`, :mod:`poplib` and :mod:`smtplib`. Use the *context* parameter + (*ssl_context* in :mod:`imaplib`) instead. + (Contributed by Victor Stinner in :gh:`94172`.) + +* :mod:`ftplib`: Remove the ``FTP_TLS.ssl_version`` class attribute: use the + *context* parameter instead. + (Contributed by Victor Stinner in :gh:`94172`.) + Porting to Python 3.12 ====================== @@ -557,6 +677,27 @@ Changes in the Python API :class:`bytes` type is accepted for bytes strings. (Contributed by Victor Stinner in :gh:`98393`.) +* :func:`syslog.openlog` and :func:`syslog.closelog` now fail if used in subinterpreters. + :func:`syslog.syslog` may still be used in subinterpreters, + but now only if :func:`syslog.openlog` has already been called in the main interpreter. + These new restrictions do not apply to the main interpreter, + so only a very small set of users might be affected. + This change helps with interpreter isolation. Furthermore, :mod:`syslog` is a wrapper + around process-global resources, which are best managed from the main interpreter. + (Contributed by Dong-hee Na in :gh:`99127`.) 
+ +* :func:`asyncio.get_event_loop` and many other :mod:`asyncio` functions like + :func:`~asyncio.ensure_future`, :func:`~asyncio.shield` or + :func:`~asyncio.gather`, and also the + :meth:`~asyncio.BaseDefaultEventLoopPolicy.get_event_loop` method of + :class:`~asyncio.BaseDefaultEventLoopPolicy` now raise a :exc:`RuntimeError` + if called when there is no running event loop and the current event loop was + not set. + Previously they implicitly created and set a new current event loop. + :exc:`DeprecationWarning` is no longer emitted if there is no running + event loop but the current event loop is set in the policy. + (Contributed by Serhiy Storchaka in :gh:`93453`.) + Build Changes ============= @@ -576,6 +717,12 @@ Build Changes if the Clang compiler accepts the flag. (Contributed by Dong-hee Na in :gh:`89536`.) +* Add ``COMPILEALL_OPTS`` variable in Makefile to override :mod:`compileall` + options (default: ``-j0``) in ``make install``. Also merged the 3 + ``compileall`` commands into a single command to build .pyc files for all + optimization levels (0, 1, 2) at once. + (Contributed by Victor Stinner in :gh:`99289`.) + C API Changes ============= @@ -610,6 +757,18 @@ New Features ``__dict__`` and weakrefs with less bookkeeping, using less memory and with faster access. +* API for performing calls using + :ref:`the vectorcall protocol <vectorcall>` was added to the + :ref:`Limited API <stable>`: + + * :c:func:`PyObject_Vectorcall` + * :c:func:`PyObject_VectorcallMethod` + * :const:`PY_VECTORCALL_ARGUMENTS_OFFSET` + + This means that both the incoming and outgoing ends of the vector call + protocol are now available in the :ref:`Limited API <stable>`. (Contributed + by Wenzel Jakob in :gh:`98586`.) + * Added two new public functions, :c:func:`PyEval_SetProfileAllThreads` and :c:func:`PyEval_SetTraceAllThreads`, that allow to set tracing and profiling @@ -630,6 +789,14 @@ New Features callbacks to receive notification on changes to a type. (Contributed by Carl Meyer in :gh:`91051`.) +* Added :c:func:`PyCode_AddWatcher` and :c:func:`PyCode_ClearWatcher` + APIs to register callbacks to receive notification on creation and + destruction of code objects. + (Contributed by Itamar Ostricher in :gh:`91054`.) + +* Add :c:func:`PyFrame_GetVar` and :c:func:`PyFrame_GetVarString` functions to + get a frame variable by its name. + (Contributed by Victor Stinner in :gh:`91248`.) Porting to Python 3.12 ---------------------- @@ -684,6 +851,11 @@ Porting to Python 3.12 :class:`bytes` type is accepted for bytes strings. (Contributed by Victor Stinner in :gh:`98393`.) +* The :c:macro:`Py_CLEAR`, :c:macro:`Py_SETREF` and :c:macro:`Py_XSETREF` + macros now only evaluate their arguments once. If an argument has side + effects, these side effects are no longer duplicated. + (Contributed by Victor Stinner in :gh:`98724`.) + Deprecated ---------- @@ -718,6 +890,37 @@ Deprecated * Creating :c:data:`immutable types <Py_TPFLAGS_IMMUTABLETYPE>` with mutable bases is deprecated and will be disabled in Python 3.14. +* The ``structmember.h`` header is deprecated, though it continues to be + available and there are no plans to remove it. + + Its contents are now available just by including ``Python.h``, + with a ``Py`` prefix added if it was missing: + + - :c:struct:`PyMemberDef`, :c:func:`PyMember_GetOne` and + :c:func:`PyMember_SetOne` + - Type macros like :c:macro:`Py_T_INT`, :c:macro:`Py_T_DOUBLE`, etc. + (previously ``T_INT``, ``T_DOUBLE``, etc.) 
+ - The flags :c:macro:`Py_READONLY` (previously ``READONLY``) and + :c:macro:`Py_AUDIT_READ` (previously all uppercase) + + Several items are not exposed from ``Python.h``: + + - :c:macro:`T_OBJECT` (use :c:macro:`Py_T_OBJECT_EX`) + - :c:macro:`T_NONE` (previously undocumented, and pretty quirky) + - The macro ``WRITE_RESTRICTED`` which does nothing. + - The macros ``RESTRICTED`` and ``READ_RESTRICTED``, equivalents of + :c:macro:`Py_AUDIT_READ`. + - In some configurations, ``<stddef.h>`` is not included from ``Python.h``. + It should be included manually when using ``offsetof()``. + + The deprecated header continues to provide its original + contents under the original names. + Your old code can stay unchanged, unless the extra include and non-namespaced + macros bother you greatly. + + (Contributed in :gh:`47146` by Petr Viktorin, based on + earlier work by Alexander Belopolsky and Matthias Braun.) + Removed ------- @@ -742,3 +945,7 @@ Removed * Remove the ``PyUnicode_InternImmortal()`` function and the ``SSTATE_INTERNED_IMMORTAL`` macro. (Contributed by Victor Stinner in :gh:`85858`.) + +* Remove ``_use_broken_old_ctypes_structure_semantics_`` flag + from :mod:`ctypes` module. + (Contributed by Nikita Sobolev in :gh:`99285`.) diff --git a/Doc/whatsnew/3.2.rst b/Doc/whatsnew/3.2.rst index 6037db9f954d26..1b1455b72b9291 100644 --- a/Doc/whatsnew/3.2.rst +++ b/Doc/whatsnew/3.2.rst @@ -468,6 +468,7 @@ Some smaller changes made to the core Python language are: >>> class LowerCasedDict(dict): ... def __getitem__(self, key): ... return dict.__getitem__(self, key.lower()) + ... >>> lcd = LowerCasedDict(part='widgets', quantity=10) >>> 'There are {QUANTITY} {Part} in stock'.format_map(lcd) 'There are 10 widgets in stock' @@ -475,6 +476,7 @@ Some smaller changes made to the core Python language are: >>> class PlaceholderDict(dict): ... def __missing__(self, key): ... return '<{}>'.format(key) + ... >>> 'Hello {name}, welcome to {location}'.format_map(PlaceholderDict()) 'Hello <name>, welcome to <location>' @@ -1886,6 +1888,7 @@ inspect >>> from inspect import getgeneratorstate >>> def gen(): ... yield 'demo' + ... >>> g = gen() >>> getgeneratorstate(g) 'GEN_CREATED' diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst index 96a632577b2c56..9e8d42469b019c 100644 --- a/Doc/whatsnew/3.3.rst +++ b/Doc/whatsnew/3.3.rst @@ -560,6 +560,7 @@ Example with (non-bound) methods:: >>> class C: ... def meth(self): ... pass + ... >>> C.meth.__name__ 'meth' >>> C.meth.__qualname__ diff --git a/Grammar/python.gram b/Grammar/python.gram index 439f08a2cd131b..2498251293e80e 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -194,7 +194,10 @@ yield_stmt[stmt_ty]: y=yield_expr { _PyAST_Expr(y, EXTRA) } assert_stmt[stmt_ty]: 'assert' a=expression b=[',' z=expression { z }] { _PyAST_Assert(a, b, EXTRA) } -import_stmt[stmt_ty]: import_name | import_from +import_stmt[stmt_ty]: + | invalid_import + | import_name + | import_from # Import statements # ----------------- @@ -954,6 +957,7 @@ kwargs[asdl_seq*]: | ','.kwarg_or_double_starred+ starred_expression[expr_ty]: + | invalid_starred_expression | '*' a=expression { _PyAST_Starred(a, Load, EXTRA) } kwarg_or_starred[KeywordOrStarred*]: @@ -1080,6 +1084,8 @@ invalid_arguments: RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, _PyPegen_get_last_comprehension_item(PyPegen_last_item(b, comprehension_ty)), "Generator expression must be parenthesized") } | a=NAME b='=' expression for_if_clauses { RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "invalid syntax. 
Maybe you meant '==' or ':=' instead of '='?")} + | (args ',')? a=NAME b='=' &(',' | ')') { + RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "expected argument value expression")} | a=args b=for_if_clauses { _PyPegen_nonparen_genexp_in_call(p, a, b) } | args ',' a=expression b=for_if_clauses { RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, _PyPegen_get_last_comprehension_item(PyPegen_last_item(b, comprehension_ty)), "Generator expression must be parenthesized") } @@ -1092,6 +1098,8 @@ invalid_kwarg: | !(NAME '=') a=expression b='=' { RAISE_SYNTAX_ERROR_KNOWN_RANGE( a, b, "expression cannot contain assignment, perhaps you meant \"==\"?") } + | a='**' expression '=' b=expression { + RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "cannot assign to keyword argument unpacking") } # IMPORTANT: Note that the "_without_invalid" suffix causes the rule to not call invalid rules under it expression_without_invalid[expr_ty]: @@ -1230,6 +1238,10 @@ invalid_group: RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "cannot use starred expression here") } | '(' a='**' expression ')' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "cannot use double starred expression here") } +invalid_import: + | a='import' dotted_name 'from' dotted_name { + RAISE_SYNTAX_ERROR_STARTING_FROM(a, "Did you mean to use 'from ... import ...' instead?") } + invalid_import_from_targets: | import_from_as_names ',' NEWLINE { RAISE_SYNTAX_ERROR("trailing comma not allowed without surrounding parentheses") } @@ -1247,8 +1259,10 @@ invalid_try_stmt: | a='try' ':' NEWLINE !INDENT { RAISE_INDENTATION_ERROR("expected an indented block after 'try' statement on line %d", a->lineno) } | 'try' ':' block !('except' | 'finally') { RAISE_SYNTAX_ERROR("expected 'except' or 'finally' block") } - | 'try' ':' block* ((except_block+ except_star_block) | (except_star_block+ except_block)) block* { - RAISE_SYNTAX_ERROR("cannot have both 'except' and 'except*' on the same 'try'") } + | 'try' ':' block* except_block+ a='except' b='*' expression ['as' NAME] ':' { + RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "cannot have both 'except' and 'except*' on the same 'try'") } + | 'try' ':' block* except_star_block+ a='except' [expression ['as' NAME]] ':' { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "cannot have both 'except' and 'except*' on the same 'try'") } invalid_except_stmt: | 'except' '*'? a=expression ',' expressions ['as' NAME ] ':' { RAISE_SYNTAX_ERROR_STARTING_FROM(a, "multiple exception types must be parenthesized") } @@ -1319,3 +1333,5 @@ invalid_kvpair: RAISE_ERROR_KNOWN_LOCATION(p, PyExc_SyntaxError, a->lineno, a->end_col_offset - 1, a->end_lineno, -1, "':' expected after dictionary key") } | expression ':' a='*' bitwise_or { RAISE_SYNTAX_ERROR_STARTING_FROM(a, "cannot use a starred expression in a dictionary value") } | expression a=':' &('}'|',') {RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "expression expected after dictionary key and ':'") } +invalid_starred_expression: + | a='*' expression '=' b=expression { RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "cannot assign to iterable argument unpacking") } diff --git a/Include/cpython/classobject.h b/Include/cpython/classobject.h index 051041965002a3..d7c9ddd1336c46 100644 --- a/Include/cpython/classobject.h +++ b/Include/cpython/classobject.h @@ -26,12 +26,20 @@ PyAPI_FUNC(PyObject *) PyMethod_New(PyObject *, PyObject *); PyAPI_FUNC(PyObject *) PyMethod_Function(PyObject *); PyAPI_FUNC(PyObject *) PyMethod_Self(PyObject *); -/* Macros for direct access to these values. Type checks are *not* - done, so use with care. 
*/ -#define PyMethod_GET_FUNCTION(meth) \ - (((PyMethodObject *)(meth)) -> im_func) -#define PyMethod_GET_SELF(meth) \ - (((PyMethodObject *)(meth)) -> im_self) +#define _PyMethod_CAST(meth) \ + (assert(PyMethod_Check(meth)), _Py_CAST(PyMethodObject*, meth)) + +/* Static inline functions for direct access to these values. + Type checks are *not* done, so use with care. */ +static inline PyObject* PyMethod_GET_FUNCTION(PyObject *meth) { + return _PyMethod_CAST(meth)->im_func; +} +#define PyMethod_GET_FUNCTION(meth) PyMethod_GET_FUNCTION(_PyObject_CAST(meth)) + +static inline PyObject* PyMethod_GET_SELF(PyObject *meth) { + return _PyMethod_CAST(meth)->im_self; +} +#define PyMethod_GET_SELF(meth) PyMethod_GET_SELF(_PyObject_CAST(meth)) typedef struct { PyObject_HEAD @@ -45,10 +53,16 @@ PyAPI_DATA(PyTypeObject) PyInstanceMethod_Type; PyAPI_FUNC(PyObject *) PyInstanceMethod_New(PyObject *); PyAPI_FUNC(PyObject *) PyInstanceMethod_Function(PyObject *); -/* Macros for direct access to these values. Type checks are *not* - done, so use with care. */ -#define PyInstanceMethod_GET_FUNCTION(meth) \ - (((PyInstanceMethodObject *)(meth)) -> func) +#define _PyInstanceMethod_CAST(meth) \ + (assert(PyInstanceMethod_Check(meth)), \ + _Py_CAST(PyInstanceMethodObject*, meth)) + +/* Static inline function for direct access to these values. + Type checks are *not* done, so use with care. */ +static inline PyObject* PyInstanceMethod_GET_FUNCTION(PyObject *meth) { + return _PyInstanceMethod_CAST(meth)->func; +} +#define PyInstanceMethod_GET_FUNCTION(meth) PyInstanceMethod_GET_FUNCTION(_PyObject_CAST(meth)) #ifdef __cplusplus } diff --git a/Include/cpython/code.h b/Include/cpython/code.h index 893ff934d3d879..d8c623b0e13446 100644 --- a/Include/cpython/code.h +++ b/Include/cpython/code.h @@ -16,21 +16,30 @@ extern "C" { * 2**32 - 1, rather than INT_MAX. */ -typedef uint16_t _Py_CODEUNIT; - -#ifdef WORDS_BIGENDIAN -# define _Py_OPCODE(word) ((word) >> 8) -# define _Py_OPARG(word) ((word) & 255) -# define _Py_MAKECODEUNIT(opcode, oparg) (((opcode)<<8)|(oparg)) -#else -# define _Py_OPCODE(word) ((word) & 255) -# define _Py_OPARG(word) ((word) >> 8) -# define _Py_MAKECODEUNIT(opcode, oparg) ((opcode)|((oparg)<<8)) -#endif +typedef union { + uint16_t cache; + struct { + uint8_t opcode; + uint8_t oparg; + }; + struct { + uint8_t oparg2; + uint8_t oparg3; + }; +} _Py_CODEUNIT; + +#define _Py_OPCODE(word) ((word).opcode) +#define _Py_OPARG(word) ((word).oparg) +#define _Py_OPARG2(word) ((word).oparg2) +#define _Py_OPARG3(word) ((word).oparg3) + +static inline void +_py_set_opcode(_Py_CODEUNIT *word, uint8_t opcode) +{ + word->opcode = opcode; +} -// Use "unsigned char" instead of "uint8_t" here to avoid illegal aliasing: -#define _Py_SET_OPCODE(word, opcode) \ - do { ((unsigned char *)&(word))[0] = (opcode); } while (0) +#define _Py_SET_OPCODE(word, opcode) _py_set_opcode(&(word), opcode) typedef struct { PyObject *_co_code; @@ -70,7 +79,6 @@ typedef struct { PyObject *co_exceptiontable; /* Byte string encoding exception handling \ table */ \ int co_flags; /* CO_..., see below */ \ - short co_warmup; /* Warmup counter for quickening */ \ short _co_linearray_entry_size; /* Size of each entry in _co_linearray */ \ \ /* The rest are not so impactful on performance. 
*/ \ @@ -88,6 +96,7 @@ typedef struct { int co_nplaincellvars; /* number of non-arg cell variables */ \ int co_ncellvars; /* total number of cell variables */ \ int co_nfreevars; /* number of free variables */ \ + uint32_t co_version; /* version number */ \ \ PyObject *co_localsplusnames; /* tuple mapping offsets to names */ \ PyObject *co_localspluskinds; /* Bytes mapping to local kinds (one byte \ @@ -148,8 +157,13 @@ struct PyCodeObject _PyCode_DEF(1); PyAPI_DATA(PyTypeObject) PyCode_Type; #define PyCode_Check(op) Py_IS_TYPE((op), &PyCode_Type) -#define PyCode_GetNumFree(op) ((op)->co_nfreevars) -#define _PyCode_CODE(CO) ((_Py_CODEUNIT *)(CO)->co_code_adaptive) + +static inline Py_ssize_t PyCode_GetNumFree(PyCodeObject *op) { + assert(PyCode_Check(op)); + return op->co_nfreevars; +} + +#define _PyCode_CODE(CO) _Py_RVALUE((_Py_CODEUNIT *)(CO)->co_code_adaptive) #define _PyCode_NBYTES(CO) (Py_SIZE(CO) * (Py_ssize_t)sizeof(_Py_CODEUNIT)) /* Public interface */ @@ -177,6 +191,41 @@ PyAPI_FUNC(int) PyCode_Addr2Line(PyCodeObject *, int); PyAPI_FUNC(int) PyCode_Addr2Location(PyCodeObject *, int, int *, int *, int *, int *); +typedef enum PyCodeEvent { + PY_CODE_EVENT_CREATE, + PY_CODE_EVENT_DESTROY +} PyCodeEvent; + + +/* + * A callback that is invoked for different events in a code object's lifecycle. + * + * The callback is invoked with a borrowed reference to co, after it is + * created and before it is destroyed. + * + * If the callback returns with an exception set, it must return -1. Otherwise + * it should return 0. + */ +typedef int (*PyCode_WatchCallback)( + PyCodeEvent event, + PyCodeObject* co); + +/* + * Register a per-interpreter callback that will be invoked for code object + * lifecycle events. + * + * Returns a handle that may be passed to PyCode_ClearWatcher on success, + * or -1 and sets an error if no more handles are available. + */ +PyAPI_FUNC(int) PyCode_AddWatcher(PyCode_WatchCallback callback); + +/* + * Clear the watcher associated with the watcher_id handle. + * + * Returns 0 on success or -1 if no watcher exists for the provided id. + */ +PyAPI_FUNC(int) PyCode_ClearWatcher(int watcher_id); + /* for internal use only */ struct _opaque { int computed_line; diff --git a/Include/cpython/floatobject.h b/Include/cpython/floatobject.h index 7795d9f83f05cb..127093098bfe64 100644 --- a/Include/cpython/floatobject.h +++ b/Include/cpython/floatobject.h @@ -7,9 +7,15 @@ typedef struct { double ob_fval; } PyFloatObject; -// Macro version of PyFloat_AsDouble() trading safety for speed. +#define _PyFloat_CAST(op) \ + (assert(PyFloat_Check(op)), _Py_CAST(PyFloatObject*, op)) + +// Static inline version of PyFloat_AsDouble() trading safety for speed. // It doesn't check if op is a double object. 
-#define PyFloat_AS_DOUBLE(op) (((PyFloatObject *)(op))->ob_fval) +static inline double PyFloat_AS_DOUBLE(PyObject *op) { + return _PyFloat_CAST(op)->ob_fval; +} +#define PyFloat_AS_DOUBLE(op) PyFloat_AS_DOUBLE(_PyObject_CAST(op)) PyAPI_FUNC(int) PyFloat_Pack2(double x, char *p, int le); diff --git a/Include/cpython/funcobject.h b/Include/cpython/funcobject.h index dd8f20b2c20b39..5979febc2e3421 100644 --- a/Include/cpython/funcobject.h +++ b/Include/cpython/funcobject.h @@ -131,6 +131,55 @@ PyAPI_DATA(PyTypeObject) PyStaticMethod_Type; PyAPI_FUNC(PyObject *) PyClassMethod_New(PyObject *); PyAPI_FUNC(PyObject *) PyStaticMethod_New(PyObject *); +#define FOREACH_FUNC_EVENT(V) \ + V(CREATE) \ + V(DESTROY) \ + V(MODIFY_CODE) \ + V(MODIFY_DEFAULTS) \ + V(MODIFY_KWDEFAULTS) + +typedef enum { + #define DEF_EVENT(EVENT) PyFunction_EVENT_##EVENT, + FOREACH_FUNC_EVENT(DEF_EVENT) + #undef DEF_EVENT +} PyFunction_WatchEvent; + +/* + * A callback that is invoked for different events in a function's lifecycle. + * + * The callback is invoked with a borrowed reference to func, after it is + * created and before it is modified or destroyed. The callback should not + * modify func. + * + * When a function's code object, defaults, or kwdefaults are modified the + * callback will be invoked with the respective event and new_value will + * contain a borrowed reference to the new value that is about to be stored in + * the function. Otherwise the third argument is NULL. + * + * If the callback returns with an exception set, it must return -1. Otherwise + * it should return 0. + */ +typedef int (*PyFunction_WatchCallback)( + PyFunction_WatchEvent event, + PyFunctionObject *func, + PyObject *new_value); + +/* + * Register a per-interpreter callback that will be invoked for function lifecycle + * events. + * + * Returns a handle that may be passed to PyFunction_ClearWatcher on success, + * or -1 and sets an error if no more handles are available. + */ +PyAPI_FUNC(int) PyFunction_AddWatcher(PyFunction_WatchCallback callback); + +/* + * Clear the watcher associated with the watcher_id handle. + * + * Returns 0 on success or -1 if no watcher exists for the supplied id. + */ +PyAPI_FUNC(int) PyFunction_ClearWatcher(int watcher_id); + #ifdef __cplusplus } #endif diff --git a/Include/cpython/import.h b/Include/cpython/import.h index a69b4f34def342..a58801b47f1bec 100644 --- a/Include/cpython/import.h +++ b/Include/cpython/import.h @@ -25,6 +25,7 @@ struct _inittab { const char *name; /* ASCII encoded string */ PyObject* (*initfunc)(void); }; +// This is not used after Py_Initialize() is called. PyAPI_DATA(struct _inittab *) PyImport_Inittab; PyAPI_FUNC(int) PyImport_ExtendInittab(struct _inittab *newtab); diff --git a/Include/cpython/memoryobject.h b/Include/cpython/memoryobject.h new file mode 100644 index 00000000000000..deab3cc89f726e --- /dev/null +++ b/Include/cpython/memoryobject.h @@ -0,0 +1,51 @@ +#ifndef Py_CPYTHON_MEMORYOBJECT_H +# error "this header file must not be included directly" +#endif + +PyAPI_DATA(PyTypeObject) _PyManagedBuffer_Type; + +/* The structs are declared here so that macros can work, but they shouldn't + be considered public. Don't access their fields directly, use the macros + and functions instead! 
*/ +#define _Py_MANAGED_BUFFER_RELEASED 0x001 /* access to exporter blocked */ +#define _Py_MANAGED_BUFFER_FREE_FORMAT 0x002 /* free format */ + +typedef struct { + PyObject_HEAD + int flags; /* state flags */ + Py_ssize_t exports; /* number of direct memoryview exports */ + Py_buffer master; /* snapshot buffer obtained from the original exporter */ +} _PyManagedBufferObject; + + +/* memoryview state flags */ +#define _Py_MEMORYVIEW_RELEASED 0x001 /* access to master buffer blocked */ +#define _Py_MEMORYVIEW_C 0x002 /* C-contiguous layout */ +#define _Py_MEMORYVIEW_FORTRAN 0x004 /* Fortran contiguous layout */ +#define _Py_MEMORYVIEW_SCALAR 0x008 /* scalar: ndim = 0 */ +#define _Py_MEMORYVIEW_PIL 0x010 /* PIL-style layout */ + +typedef struct { + PyObject_VAR_HEAD + _PyManagedBufferObject *mbuf; /* managed buffer */ + Py_hash_t hash; /* hash value for read-only views */ + int flags; /* state flags */ + Py_ssize_t exports; /* number of buffer re-exports */ + Py_buffer view; /* private copy of the exporter's view */ + PyObject *weakreflist; + Py_ssize_t ob_array[1]; /* shape, strides, suboffsets */ +} PyMemoryViewObject; + +#define _PyMemoryView_CAST(op) _Py_CAST(PyMemoryViewObject*, op) + +/* Get a pointer to the memoryview's private copy of the exporter's buffer. */ +static inline Py_buffer* PyMemoryView_GET_BUFFER(PyObject *op) { + return (&_PyMemoryView_CAST(op)->view); +} +#define PyMemoryView_GET_BUFFER(op) PyMemoryView_GET_BUFFER(_PyObject_CAST(op)) + +/* Get a pointer to the exporting object (this may be NULL!). */ +static inline PyObject* PyMemoryView_GET_BASE(PyObject *op) { + return _PyMemoryView_CAST(op)->view.obj; +} +#define PyMemoryView_GET_BASE(op) PyMemoryView_GET_BASE(_PyObject_CAST(op)) diff --git a/Include/cpython/modsupport.h b/Include/cpython/modsupport.h index d8458923b3fab8..88f34fe7513bf2 100644 --- a/Include/cpython/modsupport.h +++ b/Include/cpython/modsupport.h @@ -106,5 +106,3 @@ PyAPI_FUNC(PyObject * const *) _PyArg_UnpackKeywordsWithVararg( (minpos), (maxpos), (minkw), (buf))) PyAPI_FUNC(PyObject *) _PyModule_CreateInitialized(PyModuleDef*, int apiver); - -PyAPI_DATA(const char *) _Py_PackageContext; diff --git a/Include/cpython/object.h b/Include/cpython/object.h index fa0cfb24484952..4263370861302b 100644 --- a/Include/cpython/object.h +++ b/Include/cpython/object.h @@ -41,7 +41,7 @@ typedef struct _Py_Identifier { Py_ssize_t index; } _Py_Identifier; -#if defined(NEEDS_PY_IDENTIFIER) || !defined(Py_BUILD_CORE) +#ifndef Py_BUILD_CORE // For now we are keeping _Py_IDENTIFIER for continued use // in non-builtin extensions (and naughty PyPI modules). @@ -49,7 +49,7 @@ typedef struct _Py_Identifier { #define _Py_static_string(varname, value) static _Py_Identifier varname = _Py_static_string_init(value) #define _Py_IDENTIFIER(varname) _Py_static_string(PyId_##varname, #varname) -#endif /* NEEDS_PY_IDENTIFIER */ +#endif /* !Py_BUILD_CORE */ typedef struct { /* Number implementations must check *both* @@ -305,38 +305,69 @@ _PyObject_GenericSetAttrWithDict(PyObject *, PyObject *, PyAPI_FUNC(PyObject *) _PyObject_FunctionStr(PyObject *); -/* Safely decref `op` and set `op` to `op2`. +/* Safely decref `dst` and set `dst` to `src`. 
* * As in case of Py_CLEAR "the obvious" code can be deadly: * * - Py_DECREF(op); - op = op2; + Py_DECREF(dst); + dst = src; * * The safe way is: * * - Py_SETREF(op, op2); + Py_SETREF(dst, src); + * + * That arranges to set `dst` to `src` _before_ decref'ing, so that any code + * triggered as a side-effect of `dst` getting torn down no longer believes + * `dst` points to a valid object. * - * That arranges to set `op` to `op2` _before_ decref'ing, so that any code - * triggered as a side-effect of `op` getting torn down no longer believes - * `op` points to a valid object. + * Temporary variables are used to only evaluate macro arguments once and so + * avoid the duplication of side effects. _Py_TYPEOF() or memcpy() is used to + * avoid a miscompilation caused by type punning. See Py_CLEAR() comment for + * implementation details about type punning. * - * Py_XSETREF is a variant of Py_SETREF that uses Py_XDECREF instead of - * Py_DECREF. + * The memcpy() implementation does not emit a compiler warning if 'src' does + * not have the same type as 'dst': any pointer type is accepted for 'src'. */ - -#define Py_SETREF(op, op2) \ - do { \ - PyObject *_py_tmp = _PyObject_CAST(op); \ - (op) = (op2); \ - Py_DECREF(_py_tmp); \ +#ifdef _Py_TYPEOF +#define Py_SETREF(dst, src) \ + do { \ + _Py_TYPEOF(dst)* _tmp_dst_ptr = &(dst); \ + _Py_TYPEOF(dst) _tmp_old_dst = (*_tmp_dst_ptr); \ + *_tmp_dst_ptr = (src); \ + Py_DECREF(_tmp_old_dst); \ + } while (0) +#else +#define Py_SETREF(dst, src) \ + do { \ + PyObject **_tmp_dst_ptr = _Py_CAST(PyObject**, &(dst)); \ + PyObject *_tmp_old_dst = (*_tmp_dst_ptr); \ + PyObject *_tmp_src = _PyObject_CAST(src); \ + memcpy(_tmp_dst_ptr, &_tmp_src, sizeof(PyObject*)); \ + Py_DECREF(_tmp_old_dst); \ } while (0) +#endif -#define Py_XSETREF(op, op2) \ - do { \ - PyObject *_py_tmp = _PyObject_CAST(op); \ - (op) = (op2); \ - Py_XDECREF(_py_tmp); \ +/* Py_XSETREF() is a variant of Py_SETREF() that uses Py_XDECREF() instead of + * Py_DECREF(). + */ +#ifdef _Py_TYPEOF +#define Py_XSETREF(dst, src) \ + do { \ + _Py_TYPEOF(dst)* _tmp_dst_ptr = &(dst); \ + _Py_TYPEOF(dst) _tmp_old_dst = (*_tmp_dst_ptr); \ + *_tmp_dst_ptr = (src); \ + Py_XDECREF(_tmp_old_dst); \ } while (0) +#else +#define Py_XSETREF(dst, src) \ + do { \ + PyObject **_tmp_dst_ptr = _Py_CAST(PyObject**, &(dst)); \ + PyObject *_tmp_old_dst = (*_tmp_dst_ptr); \ + PyObject *_tmp_src = _PyObject_CAST(src); \ + memcpy(_tmp_dst_ptr, &_tmp_src, sizeof(PyObject*)); \ + Py_XDECREF(_tmp_old_dst); \ + } while (0) +#endif PyAPI_DATA(PyTypeObject) _PyNone_Type; diff --git a/Include/cpython/objimpl.h b/Include/cpython/objimpl.h index d7c76eab5c7312..0b038d31080be9 100644 --- a/Include/cpython/objimpl.h +++ b/Include/cpython/objimpl.h @@ -2,7 +2,9 @@ # error "this header file must not be included directly" #endif -#define _PyObject_SIZE(typeobj) ( (typeobj)->tp_basicsize ) +static inline size_t _PyObject_SIZE(PyTypeObject *type) { + return _Py_STATIC_CAST(size_t, type->tp_basicsize); +} /* _PyObject_VAR_SIZE returns the number of bytes (as size_t) allocated for a vrbl-size object with nitems items, exclusive of gc overhead (if any). 
The @@ -18,10 +20,11 @@ # error "_PyObject_VAR_SIZE requires SIZEOF_VOID_P be a power of 2" #endif -#define _PyObject_VAR_SIZE(typeobj, nitems) \ - _Py_SIZE_ROUND_UP((typeobj)->tp_basicsize + \ - (nitems)*(typeobj)->tp_itemsize, \ - SIZEOF_VOID_P) +static inline size_t _PyObject_VAR_SIZE(PyTypeObject *type, Py_ssize_t nitems) { + size_t size = _Py_STATIC_CAST(size_t, type->tp_basicsize); + size += _Py_STATIC_CAST(size_t, nitems) * _Py_STATIC_CAST(size_t, type->tp_itemsize); + return _Py_SIZE_ROUND_UP(size, SIZEOF_VOID_P); +} /* This example code implements an object constructor with a custom diff --git a/Include/cpython/pyframe.h b/Include/cpython/pyframe.h index 1dc634ccee9a27..6ec292718aff1a 100644 --- a/Include/cpython/pyframe.h +++ b/Include/cpython/pyframe.h @@ -14,4 +14,5 @@ PyAPI_FUNC(PyObject *) PyFrame_GetBuiltins(PyFrameObject *frame); PyAPI_FUNC(PyObject *) PyFrame_GetGenerator(PyFrameObject *frame); PyAPI_FUNC(int) PyFrame_GetLasti(PyFrameObject *frame); - +PyAPI_FUNC(PyObject*) PyFrame_GetVar(PyFrameObject *frame, PyObject *name); +PyAPI_FUNC(PyObject*) PyFrame_GetVarString(PyFrameObject *frame, const char *name); diff --git a/Include/cpython/pystate.h b/Include/cpython/pystate.h index 70c23427807e52..0117c23f518cdb 100644 --- a/Include/cpython/pystate.h +++ b/Include/cpython/pystate.h @@ -120,9 +120,6 @@ struct _ts { after allocation. */ int _initialized; - /* Was this thread state statically allocated? */ - int _static; - int py_recursion_remaining; int py_recursion_limit; @@ -356,6 +353,9 @@ PyAPI_FUNC(const PyConfig*) _Py_GetConfig(void); // is necessary to pass safely between interpreters in the same process. typedef struct _xid _PyCrossInterpreterData; +typedef PyObject *(*xid_newobjectfunc)(_PyCrossInterpreterData *); +typedef void (*xid_freefunc)(void *); + struct _xid { // data is the cross-interpreter-safe derivation of a Python object // (see _PyObject_GetCrossInterpreterData). It will be NULL if the @@ -382,7 +382,7 @@ struct _xid { // interpreter given the data. The resulting object (a new // reference) will be equivalent to the original object. This field // is required. - PyObject *(*new_object)(_PyCrossInterpreterData *); + xid_newobjectfunc new_object; // free is called when the data is released. If it is NULL then // nothing will be done to free the data. For some types this is // okay (e.g. bytes) and for those types this field should be set @@ -392,18 +392,31 @@ struct _xid { // leak. In that case, at the very least this field should be set // to PyMem_RawFree (the default if not explicitly set to NULL). // The call will happen with the original interpreter activated. 
- void (*free)(void *); + xid_freefunc free; }; +PyAPI_FUNC(void) _PyCrossInterpreterData_Init( + _PyCrossInterpreterData *data, + PyInterpreterState *interp, void *shared, PyObject *obj, + xid_newobjectfunc new_object); +PyAPI_FUNC(int) _PyCrossInterpreterData_InitWithSize( + _PyCrossInterpreterData *, + PyInterpreterState *interp, const size_t, PyObject *, + xid_newobjectfunc); +PyAPI_FUNC(void) _PyCrossInterpreterData_Clear( + PyInterpreterState *, _PyCrossInterpreterData *); + PyAPI_FUNC(int) _PyObject_GetCrossInterpreterData(PyObject *, _PyCrossInterpreterData *); PyAPI_FUNC(PyObject *) _PyCrossInterpreterData_NewObject(_PyCrossInterpreterData *); -PyAPI_FUNC(void) _PyCrossInterpreterData_Release(_PyCrossInterpreterData *); +PyAPI_FUNC(int) _PyCrossInterpreterData_Release(_PyCrossInterpreterData *); PyAPI_FUNC(int) _PyObject_CheckCrossInterpreterData(PyObject *); /* cross-interpreter data registry */ -typedef int (*crossinterpdatafunc)(PyObject *, _PyCrossInterpreterData *); +typedef int (*crossinterpdatafunc)(PyThreadState *tstate, PyObject *, + _PyCrossInterpreterData *); PyAPI_FUNC(int) _PyCrossInterpreterData_RegisterClass(PyTypeObject *, crossinterpdatafunc); +PyAPI_FUNC(int) _PyCrossInterpreterData_UnregisterClass(PyTypeObject *); PyAPI_FUNC(crossinterpdatafunc) _PyCrossInterpreterData_Lookup(PyObject *); diff --git a/Include/cpython/setobject.h b/Include/cpython/setobject.h index b4443a678b7e77..20fd63eaae56e2 100644 --- a/Include/cpython/setobject.h +++ b/Include/cpython/setobject.h @@ -58,8 +58,13 @@ typedef struct { PyObject *weakreflist; /* List of weak references */ } PySetObject; -#define PySet_GET_SIZE(so) \ - (assert(PyAnySet_Check(so)), (((PySetObject *)(so))->used)) +#define _PySet_CAST(so) \ + (assert(PyAnySet_Check(so)), _Py_CAST(PySetObject*, so)) + +static inline Py_ssize_t PySet_GET_SIZE(PyObject *so) { + return _PySet_CAST(so)->used; +} +#define PySet_GET_SIZE(so) PySet_GET_SIZE(_PyObject_CAST(so)) PyAPI_DATA(PyObject *) _PySet_Dummy; diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h index 8444507ade1b31..75a74ffa2f9dff 100644 --- a/Include/cpython/unicodeobject.h +++ b/Include/cpython/unicodeobject.h @@ -135,7 +135,7 @@ typedef struct { unsigned int ascii:1; /* Padding to ensure that PyUnicode_DATA() is always aligned to 4 bytes (see issue #19537 on m68k). */ - unsigned int :25; + unsigned int :26; } state; } PyASCIIObject; @@ -231,9 +231,7 @@ enum PyUnicode_Kind { // new compiler warnings on "kind < PyUnicode_KIND(str)" (compare signed and // unsigned numbers) where kind type is an int or on // "unsigned int kind = PyUnicode_KIND(str)" (cast signed to unsigned). -// Only declare the function as static inline function in the limited C API -// version 3.12 which is stricter. -#define PyUnicode_KIND(op) (_PyASCIIObject_CAST(op)->state.kind) +#define PyUnicode_KIND(op) _Py_RVALUE(_PyASCIIObject_CAST(op)->state.kind) /* Return a void pointer to the raw unicode buffer. */ static inline void* _PyUnicode_COMPACT_DATA(PyObject *op) { diff --git a/Include/descrobject.h b/Include/descrobject.h index 77f221df07714f..0a420b865dfd1b 100644 --- a/Include/descrobject.h +++ b/Include/descrobject.h @@ -32,6 +32,61 @@ PyAPI_FUNC(PyObject *) PyDescr_NewGetSet(PyTypeObject *, PyGetSetDef *); PyAPI_FUNC(PyObject *) PyDictProxy_New(PyObject *); PyAPI_FUNC(PyObject *) PyWrapper_New(PyObject *, PyObject *); + +/* An array of PyMemberDef structures defines the name, type and offset + of selected members of a C structure. 
These can be read by + PyMember_GetOne() and set by PyMember_SetOne() (except if their READONLY + flag is set). The array must be terminated with an entry whose name + pointer is NULL. */ +struct PyMemberDef { + const char *name; + int type; + Py_ssize_t offset; + int flags; + const char *doc; +}; + +// These constants used to be in structmember.h, not prefixed by Py_. +// (structmember.h now has aliases to the new names.) + +/* Types */ +#define Py_T_SHORT 0 +#define Py_T_INT 1 +#define Py_T_LONG 2 +#define Py_T_FLOAT 3 +#define Py_T_DOUBLE 4 +#define Py_T_STRING 5 +#define _Py_T_OBJECT 6 // Deprecated, use Py_T_OBJECT_EX instead +/* the ordering here is weird for binary compatibility */ +#define Py_T_CHAR 7 /* 1-character string */ +#define Py_T_BYTE 8 /* 8-bit signed int */ +/* unsigned variants: */ +#define Py_T_UBYTE 9 +#define Py_T_USHORT 10 +#define Py_T_UINT 11 +#define Py_T_ULONG 12 + +/* Added by Jack: strings contained in the structure */ +#define Py_T_STRING_INPLACE 13 + +/* Added by Lillo: bools contained in the structure (assumed char) */ +#define Py_T_BOOL 14 + +#define Py_T_OBJECT_EX 16 +#define Py_T_LONGLONG 17 +#define Py_T_ULONGLONG 18 + +#define Py_T_PYSSIZET 19 /* Py_ssize_t */ +#define _Py_T_NONE 20 // Deprecated. Value is always None. + +/* Flags */ +#define Py_READONLY 1 +#define Py_AUDIT_READ 2 // Added in 3.10, harmless no-op before that +#define _Py_WRITE_RESTRICTED 4 // Deprecated, no-op. Do not reuse the value. + +PyAPI_FUNC(PyObject *) PyMember_GetOne(const char *, PyMemberDef *); +PyAPI_FUNC(int) PyMember_SetOne(char *, PyMemberDef *, PyObject *); + #ifndef Py_LIMITED_API # define Py_CPYTHON_DESCROBJECT_H # include "cpython/descrobject.h" diff --git a/Include/exports.h b/Include/exports.h index fc1a5c5ead6276..59373c39ff757c 100644 --- a/Include/exports.h +++ b/Include/exports.h @@ -2,9 +2,15 @@ #define Py_EXPORTS_H #if defined(_WIN32) || defined(__CYGWIN__) - #define Py_IMPORTED_SYMBOL __declspec(dllimport) - #define Py_EXPORTED_SYMBOL __declspec(dllexport) - #define Py_LOCAL_SYMBOL + #if defined(Py_ENABLE_SHARED) + #define Py_IMPORTED_SYMBOL __declspec(dllimport) + #define Py_EXPORTED_SYMBOL __declspec(dllexport) + #define Py_LOCAL_SYMBOL + #else + #define Py_IMPORTED_SYMBOL + #define Py_EXPORTED_SYMBOL + #define Py_LOCAL_SYMBOL + #endif #else /* * If we only ever used gcc >= 5, we could use __has_attribute(visibility) diff --git a/Include/internal/pycore_ceval_state.h b/Include/internal/pycore_ceval_state.h new file mode 100644 index 00000000000000..9ba42eb03b2676 --- /dev/null +++ b/Include/internal/pycore_ceval_state.h @@ -0,0 +1,100 @@ +#ifndef Py_INTERNAL_CEVAL_STATE_H +#define Py_INTERNAL_CEVAL_STATE_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + + +#include "pycore_atomic.h" /* _Py_atomic_address */ +#include "pycore_gil.h" // struct _gil_runtime_state + + +typedef enum { + PERF_STATUS_FAILED = -1, // Perf trampoline is in an invalid state + PERF_STATUS_NO_INIT = 0, // Perf trampoline is not initialized + PERF_STATUS_OK = 1, // Perf trampoline is ready to be executed +} perf_status_t; + + +#ifdef PY_HAVE_PERF_TRAMPOLINE +struct code_arena_st; + +struct trampoline_api_st { + void* (*init_state)(void); + void (*write_state)(void* state, const void *code_addr, + unsigned int code_size, PyCodeObject* code); + int (*free_state)(void* state); + void *state; +}; +#endif + +struct _ceval_runtime_state { + struct { +#ifdef PY_HAVE_PERF_TRAMPOLINE + perf_status_t status; + 
Py_ssize_t extra_code_index; + struct code_arena_st *code_arena; + struct trampoline_api_st trampoline_api; + FILE *map_file; +#else + int _not_used; +#endif + } perf; + /* Request for checking signals. It is shared by all interpreters (see + bpo-40513). Any thread of any interpreter can receive a signal, but only + the main thread of the main interpreter can handle signals: see + _Py_ThreadCanHandleSignals(). */ + _Py_atomic_int signals_pending; + struct _gil_runtime_state gil; +}; + +#ifdef PY_HAVE_PERF_TRAMPOLINE +# define _PyEval_RUNTIME_PERF_INIT \ + { \ + .status = PERF_STATUS_NO_INIT, \ + .extra_code_index = -1, \ + } +#else +# define _PyEval_RUNTIME_PERF_INIT {0} +#endif + + +struct _pending_calls { + int busy; + PyThread_type_lock lock; + /* Request for running pending calls. */ + _Py_atomic_int calls_to_do; + /* Request for looking at the `async_exc` field of the current + thread state. + Guarded by the GIL. */ + int async_exc; +#define NPENDINGCALLS 32 + struct { + int (*func)(void *); + void *arg; + } calls[NPENDINGCALLS]; + int first; + int last; +}; + +struct _ceval_state { + int recursion_limit; + /* This single variable consolidates all requests to break out of + the fast path in the eval loop. */ + _Py_atomic_int eval_breaker; + /* Request for dropping the GIL */ + _Py_atomic_int gil_drop_request; + /* The GC is ready to be executed */ + _Py_atomic_int gc_scheduled; + struct _pending_calls pending; +}; + + +#ifdef __cplusplus +} +#endif +#endif /* !Py_INTERNAL_CEVAL_STATE_H */ diff --git a/Include/internal/pycore_code.h b/Include/internal/pycore_code.h index bf5945435c1774..9e59fc98bf3d57 100644 --- a/Include/internal/pycore_code.h +++ b/Include/internal/pycore_code.h @@ -4,6 +4,8 @@ extern "C" { #endif +#define CODE_MAX_WATCHERS 8 + /* PEP 659 * Specialization and quickening structs and helper functions */ @@ -16,53 +18,53 @@ extern "C" { #define CACHE_ENTRIES(cache) (sizeof(cache)/sizeof(_Py_CODEUNIT)) typedef struct { - _Py_CODEUNIT counter; - _Py_CODEUNIT index; - _Py_CODEUNIT module_keys_version[2]; - _Py_CODEUNIT builtin_keys_version; + uint16_t counter; + uint16_t index; + uint16_t module_keys_version[2]; + uint16_t builtin_keys_version; } _PyLoadGlobalCache; #define INLINE_CACHE_ENTRIES_LOAD_GLOBAL CACHE_ENTRIES(_PyLoadGlobalCache) typedef struct { - _Py_CODEUNIT counter; + uint16_t counter; } _PyBinaryOpCache; #define INLINE_CACHE_ENTRIES_BINARY_OP CACHE_ENTRIES(_PyBinaryOpCache) typedef struct { - _Py_CODEUNIT counter; + uint16_t counter; } _PyUnpackSequenceCache; #define INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE \ CACHE_ENTRIES(_PyUnpackSequenceCache) typedef struct { - _Py_CODEUNIT counter; - _Py_CODEUNIT mask; + uint16_t counter; + uint16_t mask; } _PyCompareOpCache; #define INLINE_CACHE_ENTRIES_COMPARE_OP CACHE_ENTRIES(_PyCompareOpCache) typedef struct { - _Py_CODEUNIT counter; - _Py_CODEUNIT type_version[2]; - _Py_CODEUNIT func_version; + uint16_t counter; + uint16_t type_version[2]; + uint16_t func_version; } _PyBinarySubscrCache; #define INLINE_CACHE_ENTRIES_BINARY_SUBSCR CACHE_ENTRIES(_PyBinarySubscrCache) typedef struct { - _Py_CODEUNIT counter; - _Py_CODEUNIT version[2]; - _Py_CODEUNIT index; + uint16_t counter; + uint16_t version[2]; + uint16_t index; } _PyAttrCache; typedef struct { - _Py_CODEUNIT counter; - _Py_CODEUNIT type_version[2]; - _Py_CODEUNIT keys_version[2]; - _Py_CODEUNIT descr[4]; + uint16_t counter; + uint16_t type_version[2]; + uint16_t keys_version[2]; + uint16_t descr[4]; } _PyLoadMethodCache; @@ -72,47 +74,25 @@ typedef struct { #define 
INLINE_CACHE_ENTRIES_STORE_ATTR CACHE_ENTRIES(_PyAttrCache) typedef struct { - _Py_CODEUNIT counter; - _Py_CODEUNIT func_version[2]; - _Py_CODEUNIT min_args; + uint16_t counter; + uint16_t func_version[2]; + uint16_t min_args; } _PyCallCache; #define INLINE_CACHE_ENTRIES_CALL CACHE_ENTRIES(_PyCallCache) typedef struct { - _Py_CODEUNIT counter; + uint16_t counter; } _PyStoreSubscrCache; #define INLINE_CACHE_ENTRIES_STORE_SUBSCR CACHE_ENTRIES(_PyStoreSubscrCache) typedef struct { - _Py_CODEUNIT counter; + uint16_t counter; } _PyForIterCache; #define INLINE_CACHE_ENTRIES_FOR_ITER CACHE_ENTRIES(_PyForIterCache) -#define QUICKENING_WARMUP_DELAY 8 - -/* We want to compare to zero for efficiency, so we offset values accordingly */ -#define QUICKENING_INITIAL_WARMUP_VALUE (-QUICKENING_WARMUP_DELAY) - -void _PyCode_Quicken(PyCodeObject *code); - -static inline void -_PyCode_Warmup(PyCodeObject *code) -{ - if (code->co_warmup != 0) { - code->co_warmup++; - if (code->co_warmup == 0) { - _PyCode_Quicken(code); - } - } -} - -extern uint8_t _PyOpcode_Adaptive[256]; - -extern Py_ssize_t _Py_QuickenedCount; - // Borrowed references to common callables: struct callable_cache { PyObject *isinstance; @@ -235,27 +215,30 @@ extern int _PyLineTable_PreviousAddressRange(PyCodeAddressRange *range); /* Specialization functions */ -extern int _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, - PyObject *name); -extern int _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, +extern void _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name); -extern int _Py_Specialize_LoadGlobal(PyObject *globals, PyObject *builtins, _Py_CODEUNIT *instr, PyObject *name); -extern int _Py_Specialize_BinarySubscr(PyObject *sub, PyObject *container, _Py_CODEUNIT *instr); -extern int _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, _Py_CODEUNIT *instr); -extern int _Py_Specialize_Call(PyObject *callable, _Py_CODEUNIT *instr, - int nargs, PyObject *kwnames); +extern void _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, + PyObject *name); +extern void _Py_Specialize_LoadGlobal(PyObject *globals, PyObject *builtins, + _Py_CODEUNIT *instr, PyObject *name); +extern void _Py_Specialize_BinarySubscr(PyObject *sub, PyObject *container, + _Py_CODEUNIT *instr); +extern void _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, + _Py_CODEUNIT *instr); +extern void _Py_Specialize_Call(PyObject *callable, _Py_CODEUNIT *instr, + int nargs, PyObject *kwnames); extern void _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr, int oparg, PyObject **locals); extern void _Py_Specialize_CompareOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr, int oparg); extern void _Py_Specialize_UnpackSequence(PyObject *seq, _Py_CODEUNIT *instr, int oparg); -extern void _Py_Specialize_ForIter(PyObject *iter, _Py_CODEUNIT *instr); +extern void _Py_Specialize_ForIter(PyObject *iter, _Py_CODEUNIT *instr, int oparg); -/* Deallocator function for static codeobjects used in deepfreeze.py */ -extern void _PyStaticCode_Dealloc(PyCodeObject *co); -/* Function to intern strings of codeobjects */ -extern int _PyStaticCode_InternStrings(PyCodeObject *co); +/* Finalizer function for static codeobjects used in deepfreeze.py */ +extern void _PyStaticCode_Fini(PyCodeObject *co); +/* Function to intern strings of codeobjects and quicken the bytecode */ +extern int _PyStaticCode_Init(PyCodeObject *co); #ifdef Py_STATS @@ -312,6 +295,12 @@ write_obj(uint16_t *p, PyObject *val) 
memcpy(p, &val, sizeof(val)); } +static inline uint16_t +read_u16(uint16_t *p) +{ + return *p; +} + static inline uint32_t read_u32(uint16_t *p) { @@ -397,8 +386,22 @@ write_location_entry_start(uint8_t *ptr, int code, int length) /* With a 16-bit counter, we have 12 bits for the counter value, and 4 bits for the backoff */ #define ADAPTIVE_BACKOFF_BITS 4 -/* The initial counter value is 31 == 2**ADAPTIVE_BACKOFF_START - 1 */ -#define ADAPTIVE_BACKOFF_START 5 + +// A value of 1 means that we attempt to specialize the *second* time each +// instruction is executed. Executing twice is a much better indicator of +// "hotness" than executing once, but additional warmup delays only prevent +// specialization. Most types stabilize by the second execution, too: +#define ADAPTIVE_WARMUP_VALUE 1 +#define ADAPTIVE_WARMUP_BACKOFF 1 + +// A value of 52 means that we attempt to re-specialize after 53 misses (a prime +// number, useful for avoiding artifacts if every nth value is a different type +// or something). Setting the backoff to 0 means that the counter is reset to +// the same state as a warming-up instruction (value == 1, backoff == 1) after +// deoptimization. This isn't strictly necessary, but it is bit easier to reason +// about when thinking about the opcode transitions as a state machine: +#define ADAPTIVE_COOLDOWN_VALUE 52 +#define ADAPTIVE_COOLDOWN_BACKOFF 0 #define MAX_BACKOFF_VALUE (16 - ADAPTIVE_BACKOFF_BITS) @@ -406,13 +409,19 @@ write_location_entry_start(uint8_t *ptr, int code, int length) static inline uint16_t adaptive_counter_bits(int value, int backoff) { return (value << ADAPTIVE_BACKOFF_BITS) | - (backoff & ((1<<ADAPTIVE_BACKOFF_BITS)-1)); + (backoff & ((1<<ADAPTIVE_BACKOFF_BITS)-1)); } static inline uint16_t -adaptive_counter_start(void) { - unsigned int value = (1 << ADAPTIVE_BACKOFF_START) - 1; - return adaptive_counter_bits(value, ADAPTIVE_BACKOFF_START); +adaptive_counter_warmup(void) { + return adaptive_counter_bits(ADAPTIVE_WARMUP_VALUE, + ADAPTIVE_WARMUP_BACKOFF); +} + +static inline uint16_t +adaptive_counter_cooldown(void) { + return adaptive_counter_bits(ADAPTIVE_COOLDOWN_VALUE, + ADAPTIVE_COOLDOWN_BACKOFF); } static inline uint16_t @@ -455,6 +464,18 @@ _PyCode_LineNumberFromArray(PyCodeObject *co, int index) } } +typedef struct _PyShimCodeDef { + const uint8_t *code; + int codelen; + int stacksize; + const char *cname; +} _PyShimCodeDef; + +extern PyCodeObject * +_Py_MakeShimCode(const _PyShimCodeDef *code); + +extern uint32_t _Py_next_func_version; + #ifdef __cplusplus } diff --git a/Include/internal/pycore_compile.h b/Include/internal/pycore_compile.h index 1a628a08ca4ebf..967fe92a5bc2b2 100644 --- a/Include/internal/pycore_compile.h +++ b/Include/internal/pycore_compile.h @@ -18,10 +18,11 @@ PyAPI_FUNC(PyCodeObject*) _PyAST_Compile( PyCompilerFlags *flags, int optimize, struct _arena *arena); -extern PyFutureFeatures* _PyFuture_FromAST( + +int _PyFuture_FromAST( struct _mod * mod, - PyObject *filename - ); + PyObject *filename, + PyFutureFeatures* futures); extern PyObject* _Py_Mangle(PyObject *p, PyObject *name); @@ -39,6 +40,13 @@ extern int _PyAST_Optimize( _PyASTOptimizeState *state); /* Access compiler internals for unit testing */ + +PyAPI_FUNC(PyObject*) _PyCompile_CodeGen( + PyObject *ast, + PyObject *filename, + PyCompilerFlags *flags, + int optimize); + PyAPI_FUNC(PyObject*) _PyCompile_OptimizeCfg( PyObject *instructions, PyObject *consts); diff --git a/Include/internal/pycore_context.h b/Include/internal/pycore_context.h index 
1bf4e8f3ee532e..52dfe3ef233874 100644 --- a/Include/internal/pycore_context.h +++ b/Include/internal/pycore_context.h @@ -18,6 +18,10 @@ void _PyContext_Fini(PyInterpreterState *); /* other API */ +typedef struct { + PyObject_HEAD +} _PyContextTokenMissing; + #ifndef WITH_FREELISTS // without freelists # define PyContext_MAXFREELIST 0 diff --git a/Include/internal/pycore_dict.h b/Include/internal/pycore_dict.h index ae4094a095d879..c74a3437713039 100644 --- a/Include/internal/pycore_dict.h +++ b/Include/internal/pycore_dict.h @@ -9,6 +9,9 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif +#include "pycore_dict_state.h" +#include "pycore_runtime.h" // _PyRuntime + /* runtime lifecycle */ @@ -17,25 +20,6 @@ extern void _PyDict_Fini(PyInterpreterState *interp); /* other API */ -#ifndef WITH_FREELISTS -// without freelists -# define PyDict_MAXFREELIST 0 -#endif - -#ifndef PyDict_MAXFREELIST -# define PyDict_MAXFREELIST 80 -#endif - -struct _Py_dict_state { -#if PyDict_MAXFREELIST > 0 - /* Dictionary reuse scheme to save calls to malloc and free */ - PyDictObject *free_list[PyDict_MAXFREELIST]; - int numfree; - PyDictKeysObject *keys_free_list[PyDict_MAXFREELIST]; - int keys_numfree; -#endif -}; - typedef struct { /* Cached hash code of me_key. */ Py_hash_t me_hash; @@ -55,7 +39,7 @@ extern PyObject *_PyDict_FromKeys(PyObject *, PyObject *, PyObject *); * Returns the version number, or zero if it was not possible to get a version number. */ extern uint32_t _PyDictKeys_GetVersionForCurrentState(PyDictKeysObject *dictkeys); -extern Py_ssize_t _PyDict_KeysSize(PyDictKeysObject *keys); +extern size_t _PyDict_KeysSize(PyDictKeysObject *keys); /* _Py_dict_lookup() returns index of entry which can be used like DK_ENTRIES(dk)[index]. * -1 when no entry found, -3 when compare raises error. 
@@ -138,27 +122,34 @@ struct _dictvalues { PyObject *values[1]; }; -#define DK_LOG_SIZE(dk) ((dk)->dk_log2_size) +#define DK_LOG_SIZE(dk) _Py_RVALUE((dk)->dk_log2_size) #if SIZEOF_VOID_P > 4 #define DK_SIZE(dk) (((int64_t)1)<<DK_LOG_SIZE(dk)) #else #define DK_SIZE(dk) (1<<DK_LOG_SIZE(dk)) #endif -#define DK_ENTRIES(dk) \ - (assert((dk)->dk_kind == DICT_KEYS_GENERAL), \ - (PyDictKeyEntry*)(&((int8_t*)((dk)->dk_indices))[(size_t)1 << (dk)->dk_log2_index_bytes])) -#define DK_UNICODE_ENTRIES(dk) \ - (assert((dk)->dk_kind != DICT_KEYS_GENERAL), \ - (PyDictUnicodeEntry*)(&((int8_t*)((dk)->dk_indices))[(size_t)1 << (dk)->dk_log2_index_bytes])) -#define DK_IS_UNICODE(dk) ((dk)->dk_kind != DICT_KEYS_GENERAL) -extern uint64_t _pydict_global_version; +static inline void* _DK_ENTRIES(PyDictKeysObject *dk) { + int8_t *indices = (int8_t*)(dk->dk_indices); + size_t index = (size_t)1 << dk->dk_log2_index_bytes; + return (&indices[index]); +} +static inline PyDictKeyEntry* DK_ENTRIES(PyDictKeysObject *dk) { + assert(dk->dk_kind == DICT_KEYS_GENERAL); + return (PyDictKeyEntry*)_DK_ENTRIES(dk); +} +static inline PyDictUnicodeEntry* DK_UNICODE_ENTRIES(PyDictKeysObject *dk) { + assert(dk->dk_kind != DICT_KEYS_GENERAL); + return (PyDictUnicodeEntry*)_DK_ENTRIES(dk); +} + +#define DK_IS_UNICODE(dk) ((dk)->dk_kind != DICT_KEYS_GENERAL) -#define DICT_MAX_WATCHERS 8 #define DICT_VERSION_INCREMENT (1 << DICT_MAX_WATCHERS) #define DICT_VERSION_MASK (DICT_VERSION_INCREMENT - 1) -#define DICT_NEXT_VERSION() (_pydict_global_version += DICT_VERSION_INCREMENT) +#define DICT_NEXT_VERSION() \ + (_PyRuntime.dict_state.global_version += DICT_VERSION_INCREMENT) void _PyDict_SendEvent(int watcher_bits, diff --git a/Include/internal/pycore_dict_state.h b/Include/internal/pycore_dict_state.h new file mode 100644 index 00000000000000..77375ea8beb877 --- /dev/null +++ b/Include/internal/pycore_dict_state.h @@ -0,0 +1,47 @@ +#ifndef Py_INTERNAL_DICT_STATE_H +#define Py_INTERNAL_DICT_STATE_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + + +struct _Py_dict_runtime_state { + /*Global counter used to set ma_version_tag field of dictionary. + * It is incremented each time that a dictionary is created and each + * time that a dictionary is modified. 
*/ + uint64_t global_version; + uint32_t next_keys_version; +}; + + +#ifndef WITH_FREELISTS +// without freelists +# define PyDict_MAXFREELIST 0 +#endif + +#ifndef PyDict_MAXFREELIST +# define PyDict_MAXFREELIST 80 +#endif + +#define DICT_MAX_WATCHERS 8 + +struct _Py_dict_state { +#if PyDict_MAXFREELIST > 0 + /* Dictionary reuse scheme to save calls to malloc and free */ + PyDictObject *free_list[PyDict_MAXFREELIST]; + PyDictKeysObject *keys_free_list[PyDict_MAXFREELIST]; + int numfree; + int keys_numfree; +#endif + PyDict_WatchCallback watchers[DICT_MAX_WATCHERS]; +}; + + +#ifdef __cplusplus +} +#endif +#endif /* !Py_INTERNAL_DICT_STATE_H */ diff --git a/Include/internal/pycore_dtoa.h b/Include/internal/pycore_dtoa.h index c77cf6e46cc3c3..67189cf0ade665 100644 --- a/Include/internal/pycore_dtoa.h +++ b/Include/internal/pycore_dtoa.h @@ -1,3 +1,5 @@ +#ifndef Py_INTERNAL_DTOA_H +#define Py_INTERNAL_DTOA_H #ifdef __cplusplus extern "C" { #endif @@ -11,6 +13,49 @@ extern "C" { #if _PY_SHORT_FLOAT_REPR == 1 +typedef uint32_t ULong; + +struct +Bigint { + struct Bigint *next; + int k, maxwds, sign, wds; + ULong x[1]; +}; + +#ifdef Py_USING_MEMORY_DEBUGGER + +struct _dtoa_runtime_state { + int _not_used; +}; +#define _dtoa_runtime_state_INIT {0} + +#else // !Py_USING_MEMORY_DEBUGGER + +/* The size of the Bigint freelist */ +#define Bigint_Kmax 7 + +#ifndef PRIVATE_MEM +#define PRIVATE_MEM 2304 +#endif +#define Bigint_PREALLOC_SIZE \ + ((PRIVATE_MEM+sizeof(double)-1)/sizeof(double)) + +struct _dtoa_runtime_state { + /* p5s is a linked list of powers of 5 of the form 5**(2**i), i >= 2 */ + // XXX This should be freed during runtime fini. + struct Bigint *p5s; + struct Bigint *freelist[Bigint_Kmax+1]; + double preallocated[Bigint_PREALLOC_SIZE]; + double *preallocated_next; +}; +#define _dtoa_runtime_state_INIT(runtime) \ + { \ + .preallocated_next = runtime.dtoa.preallocated, \ + } + +#endif // !Py_USING_MEMORY_DEBUGGER + + /* These functions are used by modules compiled as C extension like math: they must be exported. 
*/ @@ -26,3 +71,4 @@ PyAPI_FUNC(double) _Py_dg_infinity(int sign); #ifdef __cplusplus } #endif +#endif /* !Py_INTERNAL_DTOA_H */ diff --git a/Include/internal/pycore_faulthandler.h b/Include/internal/pycore_faulthandler.h new file mode 100644 index 00000000000000..e6aec7745a6479 --- /dev/null +++ b/Include/internal/pycore_faulthandler.h @@ -0,0 +1,99 @@ +#ifndef Py_INTERNAL_FAULTHANDLER_H +#define Py_INTERNAL_FAULTHANDLER_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + +#ifdef HAVE_SIGACTION +# include <signal.h> +#endif + + +#ifndef MS_WINDOWS + /* register() is useless on Windows, because only SIGSEGV, SIGABRT and + SIGILL can be handled by the process, and these signals can only be used + with enable(), not using register() */ +# define FAULTHANDLER_USER +#endif + + +#ifdef HAVE_SIGACTION +/* Using an alternative stack requires sigaltstack() + and sigaction() SA_ONSTACK */ +# ifdef HAVE_SIGALTSTACK +# define FAULTHANDLER_USE_ALT_STACK +# endif +typedef struct sigaction _Py_sighandler_t; +#else +typedef PyOS_sighandler_t _Py_sighandler_t; +#endif // HAVE_SIGACTION + + +#ifdef FAULTHANDLER_USER +struct faulthandler_user_signal { + int enabled; + PyObject *file; + int fd; + int all_threads; + int chain; + _Py_sighandler_t previous; + PyInterpreterState *interp; +}; +#endif /* FAULTHANDLER_USER */ + + +struct _faulthandler_runtime_state { + struct { + int enabled; + PyObject *file; + int fd; + int all_threads; + PyInterpreterState *interp; +#ifdef MS_WINDOWS + void *exc_handler; +#endif + } fatal_error; + + struct { + PyObject *file; + int fd; + PY_TIMEOUT_T timeout_us; /* timeout in microseconds */ + int repeat; + PyInterpreterState *interp; + int exit; + char *header; + size_t header_len; + /* The main thread always holds this lock. It is only released when + faulthandler_thread() is interrupted before this thread exits, or at + Python exit. 
*/ + PyThread_type_lock cancel_event; + /* released by child thread when joined */ + PyThread_type_lock running; + } thread; + +#ifdef FAULTHANDLER_USER + struct faulthandler_user_signal *user_signals; +#endif + +#ifdef FAULTHANDLER_USE_ALT_STACK + stack_t stack; + stack_t old_stack; +#endif +}; + +#define _faulthandler_runtime_state_INIT \ + { \ + .fatal_error = { \ + .fd = -1, \ + }, \ + } + + +#ifdef __cplusplus +} +#endif +#endif /* !Py_INTERNAL_FAULTHANDLER_H */ diff --git a/Include/internal/pycore_fileutils.h b/Include/internal/pycore_fileutils.h index 3ce8108e4e04f1..ac89c43d569c07 100644 --- a/Include/internal/pycore_fileutils.h +++ b/Include/internal/pycore_fileutils.h @@ -10,6 +10,11 @@ extern "C" { #include <locale.h> /* struct lconv */ + +struct _fileutils_state { + int force_ascii; +}; + typedef enum { _Py_ERROR_UNKNOWN=0, _Py_ERROR_STRICT, diff --git a/Include/internal/pycore_floatobject.h b/Include/internal/pycore_floatobject.h index 8a655543329f33..27c63bc87f3ee3 100644 --- a/Include/internal/pycore_floatobject.h +++ b/Include/internal/pycore_floatobject.h @@ -19,6 +19,18 @@ extern void _PyFloat_FiniType(PyInterpreterState *); /* other API */ +enum _py_float_format_type { + _py_float_format_unknown, + _py_float_format_ieee_big_endian, + _py_float_format_ieee_little_endian, +}; + +struct _Py_float_runtime_state { + enum _py_float_format_type float_format; + enum _py_float_format_type double_format; +}; + + #ifndef WITH_FREELISTS // without freelists # define PyFloat_MAXFREELIST 0 diff --git a/Include/internal/pycore_frame.h b/Include/internal/pycore_frame.h index 5bd0a7f2f517ef..7fa410d288c33a 100644 --- a/Include/internal/pycore_frame.h +++ b/Include/internal/pycore_frame.h @@ -42,17 +42,18 @@ typedef enum _framestate { enum _frameowner { FRAME_OWNED_BY_THREAD = 0, FRAME_OWNED_BY_GENERATOR = 1, - FRAME_OWNED_BY_FRAME_OBJECT = 2 + FRAME_OWNED_BY_FRAME_OBJECT = 2, + FRAME_OWNED_BY_CSTACK = 3, }; typedef struct _PyInterpreterFrame { /* "Specials" section */ - PyObject *f_funcobj; /* Strong reference */ - PyObject *f_globals; /* Borrowed reference */ - PyObject *f_builtins; /* Borrowed reference */ - PyObject *f_locals; /* Strong reference, may be NULL */ + PyObject *f_funcobj; /* Strong reference. Only valid if not on C stack */ + PyObject *f_globals; /* Borrowed reference. Only valid if not on C stack */ + PyObject *f_builtins; /* Borrowed reference. Only valid if not on C stack */ + PyObject *f_locals; /* Strong reference, may be NULL. Only valid if not on C stack */ PyCodeObject *f_code; /* Strong reference */ - PyFrameObject *frame_obj; /* Strong reference, may be NULL */ + PyFrameObject *frame_obj; /* Strong reference, may be NULL. Only valid if not on C stack */ /* Linkage section */ struct _PyInterpreterFrame *previous; // NOTE: This is not necessarily the last instruction started in the given @@ -60,8 +61,8 @@ typedef struct _PyInterpreterFrame { // example, it may be an inline CACHE entry, an instruction we just jumped // over, or (in the case of a newly-created frame) a totally invalid value: _Py_CODEUNIT *prev_instr; - int stacktop; /* Offset of TOS from localsplus */ - bool is_entry; // Whether this is the "root" frame for the current _PyCFrame. 
+ int stacktop; /* Offset of TOS from localsplus */ + uint16_t yield_offset; char owner; /* Locals and stack */ PyObject *localsplus[1]; @@ -109,7 +110,7 @@ _PyFrame_InitializeSpecials( frame->stacktop = code->co_nlocalsplus; frame->frame_obj = NULL; frame->prev_instr = _PyCode_CODE(code) - 1; - frame->is_entry = false; + frame->yield_offset = 0; frame->owner = FRAME_OWNED_BY_THREAD; } diff --git a/Include/internal/pycore_function.h b/Include/internal/pycore_function.h index 1c87aa31ddb611..5cedb33d7e3afd 100644 --- a/Include/internal/pycore_function.h +++ b/Include/internal/pycore_function.h @@ -8,6 +8,12 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif +#define FUNC_MAX_WATCHERS 8 + +struct _py_func_runtime_state { + uint32_t next_version; +}; + extern PyFunctionObject* _PyFunction_FromConstructor(PyFrameConstructor *constr); extern uint32_t _PyFunction_GetVersionForCurrentState(PyFunctionObject *func); diff --git a/Include/internal/pycore_global_objects.h b/Include/internal/pycore_global_objects.h index 82e89db7b1b750..d0461fa7e82e8b 100644 --- a/Include/internal/pycore_global_objects.h +++ b/Include/internal/pycore_global_objects.h @@ -10,6 +10,9 @@ extern "C" { #include "pycore_gc.h" // PyGC_Head #include "pycore_global_strings.h" // struct _Py_global_strings +#include "pycore_hamt.h" // PyHamtNode_Bitmap +#include "pycore_context.h" // _PyContextTokenMissing +#include "pycore_typeobject.h" // pytype_slotdef // These would be in pycore_long.h if it weren't for an include cycle. @@ -20,12 +23,21 @@ extern "C" { // Only immutable objects should be considered runtime-global. // All others must be per-interpreter. +#define _Py_CACHED_OBJECT(NAME) \ + _PyRuntime.cached_objects.NAME + +struct _Py_cached_objects { + PyObject *str_replace_inf; + + PyObject *interned_strings; +}; + #define _Py_GLOBAL_OBJECT(NAME) \ - _PyRuntime.global_objects.NAME + _PyRuntime.static_objects.NAME #define _Py_SINGLETON(NAME) \ _Py_GLOBAL_OBJECT(singletons.NAME) -struct _Py_global_objects { +struct _Py_static_objects { struct { /* Small integers are preallocated in this array so that they * can be shared. @@ -44,9 +56,36 @@ struct _Py_global_objects { _PyGC_Head_UNUSED _tuple_empty_gc_not_used; PyTupleObject tuple_empty; + + _PyGC_Head_UNUSED _hamt_bitmap_node_empty_gc_not_used; + PyHamtNode_Bitmap hamt_bitmap_node_empty; + _PyContextTokenMissing context_token_missing; } singletons; +}; - PyObject *interned; +#define _Py_INTERP_CACHED_OBJECT(interp, NAME) \ + (interp)->cached_objects.NAME + +struct _Py_interp_cached_objects { + int _not_set; + /* object.__reduce__ */ + PyObject *objreduce; + PyObject *type_slots_pname; + pytype_slotdef *type_slots_ptrs[MAX_EQUIV]; +}; + +#define _Py_INTERP_STATIC_OBJECT(interp, NAME) \ + (interp)->static_objects.NAME +#define _Py_INTERP_SINGLETON(interp, NAME) \ + _Py_INTERP_STATIC_OBJECT(interp, singletons.NAME) + +struct _Py_interp_static_objects { + struct { + int _not_used; + // hamt_empty is here instead of global because of its weakreflist. 
+ _PyGC_Head_UNUSED _hamt_empty_gc_not_used; + PyHamtObject hamt_empty; + } singletons; }; diff --git a/Include/internal/pycore_global_objects_fini_generated.h b/Include/internal/pycore_global_objects_fini_generated.h new file mode 100644 index 00000000000000..6aba2f19ebde4a --- /dev/null +++ b/Include/internal/pycore_global_objects_fini_generated.h @@ -0,0 +1,1497 @@ +#ifndef Py_INTERNAL_GLOBAL_OBJECTS_FINI_GENERATED_INIT_H +#define Py_INTERNAL_GLOBAL_OBJECTS_FINI_GENERATED_INIT_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + +#include "pycore_object.h" // _PyObject_IMMORTAL_REFCNT + +#ifdef Py_DEBUG +static inline void +_PyStaticObject_CheckRefcnt(PyObject *obj) { + if (Py_REFCNT(obj) < _PyObject_IMMORTAL_REFCNT) { + _PyObject_ASSERT_FAILED_MSG(obj, + "immortal object has less refcnt than expected " + "_PyObject_IMMORTAL_REFCNT"); + } +} +#endif + +/* The following is auto-generated by Tools/build/generate_global_objects.py. */ +#ifdef Py_DEBUG +static inline void +_PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { + /* generated runtime-global */ + // (see pycore_runtime_init_generated.h) + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -5]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -4]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -3]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -2]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -1]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 0]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 1]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 2]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 3]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 4]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 5]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 6]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 7]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 8]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 9]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 10]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 11]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 12]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 13]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 14]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 15]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 16]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 17]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 18]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 19]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 20]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 21]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 22]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 23]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 24]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 25]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 26]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 27]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 28]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 29]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 30]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 31]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 32]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 33]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 34]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 35]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 36]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 37]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 38]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 39]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 40]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 41]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 42]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 43]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 44]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 45]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 46]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 47]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 48]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 49]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 50]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 51]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 52]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 53]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 54]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 55]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 56]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 57]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 58]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 59]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 60]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 61]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 62]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 63]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 64]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 65]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 66]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 67]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 68]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 69]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 70]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 71]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 72]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 73]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 74]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 75]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 76]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 77]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 78]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 79]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 80]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 81]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 82]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 83]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 84]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 85]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 86]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 87]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 88]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 89]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 90]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 91]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 92]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 93]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 94]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 95]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 96]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 97]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 98]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 99]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 100]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 101]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 102]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 103]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 104]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 105]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 106]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 107]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 108]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 109]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 110]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 111]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 112]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 113]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 114]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 115]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 116]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 117]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 118]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 119]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 120]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 121]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 122]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 123]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 124]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 125]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 126]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 127]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 129]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 130]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 131]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 132]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 133]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 134]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 135]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 136]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 137]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 138]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 139]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 140]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 141]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 142]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 143]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 144]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 145]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 146]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 147]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 148]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 149]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 150]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 151]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 152]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 153]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 154]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 155]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 156]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 157]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 158]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 159]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 160]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 161]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 162]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 163]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 164]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 165]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 166]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 167]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 168]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 169]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 170]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 171]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 172]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 173]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 174]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 175]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 176]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 177]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 178]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 179]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 180]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 181]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 182]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 183]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 184]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 185]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 186]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 187]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 188]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 189]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 190]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 191]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 192]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 193]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 194]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 195]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 196]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 197]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 198]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 199]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 200]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 201]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 202]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 203]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 204]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 205]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 206]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 207]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 208]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 209]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 210]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 211]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 212]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 213]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 214]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 215]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 216]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 217]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 218]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 219]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 220]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 221]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 222]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 223]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 224]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 225]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 226]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 227]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 228]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 229]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 230]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 231]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 232]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 233]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 234]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 235]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 236]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 237]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 238]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 239]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 240]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 241]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 242]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 243]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 244]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 245]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 246]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 247]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 248]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 249]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 250]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 251]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 252]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 253]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 254]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 255]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 256]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[0]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[1]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[2]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[3]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[4]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[5]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[6]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[7]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[8]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[9]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[10]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[11]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[12]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[13]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[14]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[15]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[16]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[17]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[18]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[19]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[20]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[21]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[22]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[23]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[24]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(bytes_characters)[25]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[26]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[27]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[28]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[29]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[30]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[31]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[32]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[33]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[34]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[35]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[36]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[37]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[38]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[39]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[40]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[41]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[42]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[43]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[44]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[45]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[46]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[47]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[48]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[49]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[50]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[51]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[52]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[53]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[54]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[55]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[56]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[57]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[58]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[59]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[60]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[61]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[62]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[63]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[64]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[65]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[66]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[67]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[68]); + 
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[69]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[70]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[71]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[72]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[73]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[74]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[75]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[76]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[77]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[78]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[79]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[80]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[81]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[82]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[83]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[84]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[85]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[86]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[87]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[88]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[89]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[90]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[91]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[92]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[93]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[94]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[95]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[96]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[97]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[98]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[99]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[100]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[101]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[102]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[103]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[104]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[105]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[106]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[107]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[108]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[109]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[110]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[111]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(bytes_characters)[112]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[113]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[114]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[115]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[116]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[117]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[118]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[119]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[120]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[121]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[122]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[123]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[124]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[125]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[126]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[127]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[129]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[130]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[131]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[132]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[133]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[134]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[135]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[136]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[137]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[138]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[139]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[140]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[141]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[142]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[143]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[144]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[145]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[146]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[147]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[148]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[149]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[150]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[151]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[152]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[153]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[154]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(bytes_characters)[155]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[156]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[157]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[158]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[159]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[160]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[161]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[162]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[163]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[164]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[165]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[166]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[167]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[168]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[169]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[170]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[171]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[172]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[173]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[174]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[175]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[176]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[177]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[178]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[179]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[180]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[181]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[182]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[183]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[184]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[185]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[186]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[187]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[188]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[189]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[190]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[191]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[192]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[193]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[194]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[195]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[196]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[197]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(bytes_characters)[198]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[199]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[200]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[201]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[202]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[203]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[204]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[205]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[206]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[207]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[208]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[209]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[210]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[211]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[212]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[213]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[214]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[215]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[216]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[217]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[218]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[219]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[220]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[221]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[222]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[223]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[224]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[225]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[226]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[227]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[228]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[229]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[230]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[231]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[232]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[233]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[234]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[235]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[236]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[237]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[238]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[239]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[240]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(bytes_characters)[241]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[242]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[243]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[244]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[245]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[246]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[247]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[248]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[249]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[250]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[251]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[252]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[253]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[254]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[255]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_dictcomp)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_genexpr)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_lambda)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_listcomp)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_module)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_setcomp)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_string)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_unknown)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(close_br)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(dbl_close_br)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(dbl_open_br)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(dbl_percent)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(dot)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(dot_locals)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(empty)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(json_decoder)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(list_err)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(newline)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(open_br)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(percent)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(shim_name)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(utf_8)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(CANCELLED)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(FINISHED)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(False)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(JSONDecodeError)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(PENDING)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(Py_Repr)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(TextIOWrapper)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(True)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(WarningMessage)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__IOBase_closed)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__abc_tpflags__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__abs__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__abstractmethods__)); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_ID(__add__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__aenter__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__aexit__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__aiter__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__all__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__and__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__anext__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__annotations__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__args__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__asyncio_running_event_loop__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__await__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__bases__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__bool__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__build_class__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__builtins__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__bytes__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__call__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__cantrace__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__class__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__class_getitem__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__classcell__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__complex__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__contains__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__copy__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__ctypes_from_outparam__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__del__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__delattr__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__delete__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__delitem__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__dict__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__dictoffset__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__dir__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__divmod__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__doc__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__enter__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__eq__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__exit__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__file__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__float__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__floordiv__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__format__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__fspath__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__ge__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__get__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__getattr__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__getattribute__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__getinitargs__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__getitem__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__getnewargs__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__getnewargs_ex__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__getstate__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__gt__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__hash__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__iadd__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__iand__)); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_ID(__ifloordiv__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__ilshift__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__imatmul__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__imod__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__import__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__imul__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__index__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__init__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__init_subclass__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__instancecheck__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__int__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__invert__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__ior__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__ipow__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__irshift__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__isabstractmethod__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__isub__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__iter__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__itruediv__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__ixor__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__le__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__len__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__length_hint__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__lltrace__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__loader__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__lshift__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__lt__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__main__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__matmul__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__missing__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__mod__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__module__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__mro_entries__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__mul__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__name__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__ne__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__neg__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__new__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__newobj__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__newobj_ex__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__next__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__notes__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__or__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__orig_class__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__origin__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__package__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__parameters__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__path__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__pos__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__pow__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__prepare__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__qualname__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__radd__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rand__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rdivmod__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__reduce__)); + 
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__reduce_ex__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__repr__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__reversed__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rfloordiv__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rlshift__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rmatmul__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rmod__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rmul__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__ror__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__round__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rpow__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rrshift__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rshift__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rsub__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rtruediv__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rxor__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__set__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__set_name__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__setattr__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__setitem__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__setstate__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__sizeof__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__slotnames__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__slots__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__spec__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__str__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__sub__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__subclasscheck__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__subclasshook__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__truediv__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__trunc__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__typing_is_unpacked_typevartuple__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__typing_prepare_subst__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__typing_subst__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__typing_unpacked_tuple_args__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__warningregistry__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__weaklistoffset__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__weakref__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__xor__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_abc_impl)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_abstract_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_annotation)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_anonymous_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_argtypes_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_as_parameter_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_asyncio_future_blocking)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_blksize)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_bootstrap)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_check_retval_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_dealloc_warn)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_feature_version)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_fields_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_finalizing)); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_ID(_find_and_load)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_fix_up_module)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_flags_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_get_sourcefile)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_handle_fromlist)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_initializing)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_is_text_encoding)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_length_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_lock_unlock_module)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_loop)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_needs_com_addref_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_pack_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_restype_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_showwarnmsg)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_shutdown)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_slotnames)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_strptime_datetime)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_swappedbytes_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_type_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_uninitialized_submodules)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_warn_unawaited_coroutine)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_xoptions)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(a)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(abs_tol)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(access)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(add)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(add_done_callback)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(after_in_child)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(after_in_parent)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(aggregate_class)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(append)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(argdefs)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(arguments)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(argv)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(as_integer_ratio)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(ast)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(attribute)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(authorizer_callback)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(autocommit)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(b)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(backtick)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(base)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(before)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(big)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(binary_form)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(block)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(buffer)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(buffer_callback)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(buffer_size)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(buffering)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(buffers)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(bufsize)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(builtins)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(byteorder)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(bytes)); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_ID(bytes_per_sep)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(c)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(c_call)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(c_exception)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(c_return)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(cached_statements)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(cadata)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(cafile)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(call)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(call_exception_handler)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(call_soon)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(cancel)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(capath)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(category)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(cb_type)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(certfile)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(check_same_thread)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(clear)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(close)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(closed)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(closefd)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(closure)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_argcount)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_cellvars)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_code)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_consts)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_exceptiontable)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_filename)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_firstlineno)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_flags)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_freevars)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_kwonlyargcount)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_linetable)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_name)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_names)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_nlocals)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_posonlyargcount)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_qualname)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_stacksize)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_varnames)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(code)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(command)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(comment_factory)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(consts)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(context)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(cookie)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(copy)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(copyreg)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(coro)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(count)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(cwd)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(d)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(data)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(database)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(decode)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(decoder)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(default)); + 
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(defaultaction)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(delete)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(depth)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(detect_types)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(deterministic)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(device)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(dict)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(dictcomp)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(difference_update)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(digest)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(digest_size)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(digestmod)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(dir_fd)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(discard)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(dispatch_table)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(displayhook)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(dklen)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(doc)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(dont_inherit)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(dst)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(dst_dir_fd)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(duration)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(e)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(effective_ids)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(element_factory)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(encode)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(encoding)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(end)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(end_lineno)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(end_offset)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(endpos)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(env)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(errors)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(event)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(eventmask)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(exc_type)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(exc_value)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(excepthook)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(exception)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(exp)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(extend)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(facility)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(factory)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(false)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(family)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fanout)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fd)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fd2)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fdel)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fget)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(file)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(file_actions)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(filename)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fileno)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(filepath)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fillvalue)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(filters)); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_ID(final)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(find_class)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fix_imports)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(flags)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(flush)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(follow_symlinks)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(format)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(frequency)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(from_param)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fromlist)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fromtimestamp)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fromutc)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fset)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(func)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(future)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(generation)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(genexpr)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(get)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(get_debug)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(get_event_loop)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(get_loop)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(get_source)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(getattr)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(getstate)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(gid)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(globals)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(groupindex)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(groups)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(handle)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(hash_name)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(header)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(headers)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(hi)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(hook)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(id)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(ident)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(ignore)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(imag)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(importlib)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(in_fd)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(incoming)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(indexgroup)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(inf)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(inheritable)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(initial)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(initial_bytes)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(initial_value)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(initval)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(inner_size)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(input)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(insert_comments)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(insert_pis)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(instructions)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(intern)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(intersection)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(isatty)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(isinstance)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(isoformat)); + 
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(isolation_level)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(istext)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(item)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(items)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(iter)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(iterable)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(iterations)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(join)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(jump)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(keepends)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(key)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(keyfile)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(keys)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(kind)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(kw)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(kw1)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(kw2)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(lambda)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(last)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(last_node)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(last_traceback)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(last_type)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(last_value)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(latin1)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(leaf_size)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(len)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(length)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(level)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(limit)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(line)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(line_buffering)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(lineno)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(listcomp)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(little)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(lo)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(locale)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(locals)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(logoption)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(loop)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(mapping)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(match)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(max_length)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(maxdigits)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(maxevents)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(maxmem)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(maxsplit)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(maxvalue)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(memLevel)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(memlimit)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(message)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(metaclass)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(method)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(mod)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(mode)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(module)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(module_globals)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(modules)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(mro)); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_ID(msg)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(mycmp)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(n)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(n_arg)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(n_fields)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(n_sequence_fields)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(n_unnamed_fields)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(name)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(name_from)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(namespace_separator)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(namespaces)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(narg)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(ndigits)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(new_limit)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(newline)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(newlines)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(next)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(node_depth)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(node_offset)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(ns)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(nstype)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(nt)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(null)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(number)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(obj)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(object)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(offset)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(offset_dst)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(offset_src)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(on_type_read)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(onceregistry)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(only_keys)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(oparg)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(opcode)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(open)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(opener)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(operation)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(optimize)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(options)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(order)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(out_fd)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(outgoing)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(overlapped)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(owner)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(p)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(pages)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(parent)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(password)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(path)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(pattern)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(peek)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(persistent_id)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(persistent_load)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(person)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(pi_factory)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(pid)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(policy)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(pos)); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_ID(pos1)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(pos2)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(posix)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(print_file_and_line)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(priority)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(progress)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(progress_handler)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(proto)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(protocol)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(ps1)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(ps2)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(query)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(quotetabs)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(r)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(raw)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(read)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(read1)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(readable)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(readall)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(readinto)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(readinto1)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(readline)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(readonly)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(real)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(reducer_override)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(registry)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(rel_tol)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(reload)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(repl)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(replace)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(reserved)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(reset)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(resetids)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(return)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(reverse)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(reversed)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(s)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(salt)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(sched_priority)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(scheduler)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(seek)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(seekable)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(selectors)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(self)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(send)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(sep)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(sequence)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(server_hostname)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(server_side)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(session)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(setcomp)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(setpgroup)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(setsid)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(setsigdef)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(setsigmask)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(setstate)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(shape)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(show_cmd)); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_ID(signed)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(size)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(sizehint)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(sleep)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(sock)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(sort)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(sound)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(source)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(source_traceback)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(src)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(src_dir_fd)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(stacklevel)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(start)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(statement)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(status)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(stderr)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(stdin)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(stdout)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(step)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(store_name)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(strategy)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(strftime)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(strict)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(strict_mode)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(string)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(sub_key)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(symmetric_difference_update)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(tabsize)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(tag)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(target)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(target_is_directory)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(task)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(tb_frame)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(tb_lasti)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(tb_lineno)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(tb_next)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(tell)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(template)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(term)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(text)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(threading)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(throw)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(timeout)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(times)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(timetuple)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(top)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(trace_callback)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(traceback)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(trailers)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(translate)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(true)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(truncate)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(twice)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(txt)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(type)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(tz)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(tzname)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(uid)); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_ID(unlink)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(unraisablehook)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(uri)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(usedforsecurity)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(value)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(values)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(version)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(warnings)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(warnoptions)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(wbits)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(week)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(weekday)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(which)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(who)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(withdata)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(writable)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(write)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(write_through)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(x)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(year)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(zdict)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[0]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[1]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[2]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[3]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[4]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[5]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[6]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[7]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[8]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[9]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[10]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[11]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[12]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[13]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[14]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[15]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[16]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[17]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[18]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[19]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[20]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[21]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[22]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[23]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[24]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[25]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[26]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[27]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[28]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(strings).ascii[29]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[30]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[31]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[32]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[33]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[34]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[35]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[36]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[37]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[38]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[39]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[40]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[41]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[42]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[43]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[44]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[45]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[46]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[47]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[48]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[49]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[50]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[51]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[52]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[53]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[54]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[55]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[56]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[57]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[58]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[59]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[60]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[61]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[62]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[63]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[64]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[65]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[66]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[67]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[68]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[69]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[70]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[71]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[72]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[73]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[74]); + 
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[75]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[76]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[77]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[78]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[79]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[80]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[81]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[82]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[83]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[84]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[85]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[86]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[87]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[88]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[89]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[90]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[91]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[92]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[93]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[94]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[95]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[96]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[97]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[98]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[99]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[100]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[101]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[102]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[103]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[104]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[105]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[106]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[107]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[108]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[109]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[110]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[111]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[112]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[113]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[114]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[115]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[116]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[117]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[118]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[119]); + 
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[120]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[121]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[122]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[123]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[124]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[125]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[126]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[127]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[128 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[129 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[130 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[131 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[132 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[133 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[134 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[135 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[136 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[137 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[138 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[139 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[140 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[141 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[142 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[143 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[144 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[145 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[146 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[147 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[148 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[149 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[150 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[151 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[152 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[153 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[154 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[155 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[156 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[157 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[158 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[159 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[160 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[161 - 128]); 
+ _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[162 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[163 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[164 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[165 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[166 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[167 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[168 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[169 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[170 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[171 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[172 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[173 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[174 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[175 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[176 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[177 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[178 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[179 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[180 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[181 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[182 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[183 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[184 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[185 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[186 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[187 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[188 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[189 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[190 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[191 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[192 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[193 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[194 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[195 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[196 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[197 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[198 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[199 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[200 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[201 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[202 - 128]); + 
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[203 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[204 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[205 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[206 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[207 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[208 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[209 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[210 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[211 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[212 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[213 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[214 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[215 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[216 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[217 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[218 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[219 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[220 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[221 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[222 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[223 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[224 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[225 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[226 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[227 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[228 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[229 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[230 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[231 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[232 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[233 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[234 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[235 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[236 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[237 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[238 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[239 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[240 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[241 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[242 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[243 - 128]); + 
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[244 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[245 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[246 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[247 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[248 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[249 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[250 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[251 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[252 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[253 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[254 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[255 - 128]); + /* non-generated */ + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_empty)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(tuple_empty)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(hamt_bitmap_node_empty)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_INTERP_SINGLETON(interp, hamt_empty)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(context_token_missing)); +} +#endif // Py_DEBUG +/* End auto-generated code */ + +#ifdef __cplusplus +} +#endif +#endif /* !Py_INTERNAL_GLOBAL_OBJECTS_FINI_GENERATED_INIT_H */ diff --git a/Include/internal/pycore_global_strings.h b/Include/internal/pycore_global_strings.h index 43f4dac8938677..acb9a4fbb92dce 100644 --- a/Include/internal/pycore_global_strings.h +++ b/Include/internal/pycore_global_strings.h @@ -11,7 +11,7 @@ extern "C" { // The data structure & init here are inspired by Tools/build/deepfreeze.py. // All field names generated by ASCII_STR() have a common prefix, -// to help avoid collisions with keywords, etc. +// to help avoid collisions with keywords, macros, etc. #define STRUCT_FOR_ASCII_STR(LITERAL) \ struct { \ @@ -19,9 +19,9 @@ extern "C" { uint8_t _data[sizeof(LITERAL)]; \ } #define STRUCT_FOR_STR(NAME, LITERAL) \ - STRUCT_FOR_ASCII_STR(LITERAL) _ ## NAME; + STRUCT_FOR_ASCII_STR(LITERAL) _py_ ## NAME; #define STRUCT_FOR_ID(NAME) \ - STRUCT_FOR_ASCII_STR(#NAME) _ ## NAME; + STRUCT_FOR_ASCII_STR(#NAME) _py_ ## NAME; // XXX Order by frequency of use? 
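The rename in the hunk above means STRUCT_FOR_STR() and STRUCT_FOR_ID() now emit struct members prefixed with _py_ instead of a bare underscore, in line with the "keywords, macros, etc." rationale in the header comment. A minimal, simplified sketch of the expansion (the toy macro and data layout below are illustrative stand-ins; the real generated header wraps a PyASCIIObject plus a generated _data buffer):

#include <stdio.h>

/* Illustrative stand-in for STRUCT_FOR_ID(): one nested struct per interned
   identifier, with the member name built from the identifier itself. */
#define TOY_STRUCT_FOR_ID(NAME) \
    struct { char data[sizeof(#NAME)]; } _py_ ## NAME;

struct toy_identifiers {
    TOY_STRUCT_FOR_ID(default)   /* declares member _py_default */
    TOY_STRUCT_FOR_ID(signed)    /* declares member _py_signed  */
};

int main(void)
{
    /* Under the old "_" prefix these members would have been _default and
       _signed; the longer _py_ prefix makes a clash with an existing macro
       or reserved name far less likely, which is the collision the header
       comment warns about. */
    struct toy_identifiers ids = { {"default"}, {"signed"} };
    printf("%s %s\n", ids._py_default.data, ids._py_signed.data);
    return 0;
}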
@@ -37,22 +37,27 @@ struct _Py_global_strings { STRUCT_FOR_STR(anon_string, "<string>") STRUCT_FOR_STR(anon_unknown, "<unknown>") STRUCT_FOR_STR(close_br, "}") - STRUCT_FOR_STR(comma_sep, ", ") STRUCT_FOR_STR(dbl_close_br, "}}") STRUCT_FOR_STR(dbl_open_br, "{{") STRUCT_FOR_STR(dbl_percent, "%%") STRUCT_FOR_STR(dot, ".") STRUCT_FOR_STR(dot_locals, ".<locals>") STRUCT_FOR_STR(empty, "") + STRUCT_FOR_STR(json_decoder, "json.decoder") STRUCT_FOR_STR(list_err, "list index out of range") STRUCT_FOR_STR(newline, "\n") STRUCT_FOR_STR(open_br, "{") STRUCT_FOR_STR(percent, "%") + STRUCT_FOR_STR(shim_name, "<shim>") STRUCT_FOR_STR(utf_8, "utf-8") } literals; struct { + STRUCT_FOR_ID(CANCELLED) + STRUCT_FOR_ID(FINISHED) STRUCT_FOR_ID(False) + STRUCT_FOR_ID(JSONDecodeError) + STRUCT_FOR_ID(PENDING) STRUCT_FOR_ID(Py_Repr) STRUCT_FOR_ID(TextIOWrapper) STRUCT_FOR_ID(True) @@ -71,6 +76,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(__anext__) STRUCT_FOR_ID(__annotations__) STRUCT_FOR_ID(__args__) + STRUCT_FOR_ID(__asyncio_running_event_loop__) STRUCT_FOR_ID(__await__) STRUCT_FOR_ID(__bases__) STRUCT_FOR_ID(__bool__) @@ -85,6 +91,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(__complex__) STRUCT_FOR_ID(__contains__) STRUCT_FOR_ID(__copy__) + STRUCT_FOR_ID(__ctypes_from_outparam__) STRUCT_FOR_ID(__del__) STRUCT_FOR_ID(__delattr__) STRUCT_FOR_ID(__delete__) @@ -211,22 +218,38 @@ struct _Py_global_strings { STRUCT_FOR_ID(__weakref__) STRUCT_FOR_ID(__xor__) STRUCT_FOR_ID(_abc_impl) + STRUCT_FOR_ID(_abstract_) STRUCT_FOR_ID(_annotation) + STRUCT_FOR_ID(_anonymous_) + STRUCT_FOR_ID(_argtypes_) + STRUCT_FOR_ID(_as_parameter_) + STRUCT_FOR_ID(_asyncio_future_blocking) STRUCT_FOR_ID(_blksize) STRUCT_FOR_ID(_bootstrap) + STRUCT_FOR_ID(_check_retval_) STRUCT_FOR_ID(_dealloc_warn) STRUCT_FOR_ID(_feature_version) + STRUCT_FOR_ID(_fields_) STRUCT_FOR_ID(_finalizing) STRUCT_FOR_ID(_find_and_load) STRUCT_FOR_ID(_fix_up_module) + STRUCT_FOR_ID(_flags_) STRUCT_FOR_ID(_get_sourcefile) STRUCT_FOR_ID(_handle_fromlist) STRUCT_FOR_ID(_initializing) STRUCT_FOR_ID(_is_text_encoding) + STRUCT_FOR_ID(_length_) STRUCT_FOR_ID(_lock_unlock_module) + STRUCT_FOR_ID(_loop) + STRUCT_FOR_ID(_needs_com_addref_) + STRUCT_FOR_ID(_pack_) + STRUCT_FOR_ID(_restype_) STRUCT_FOR_ID(_showwarnmsg) STRUCT_FOR_ID(_shutdown) STRUCT_FOR_ID(_slotnames) + STRUCT_FOR_ID(_strptime_datetime) + STRUCT_FOR_ID(_swappedbytes_) + STRUCT_FOR_ID(_type_) STRUCT_FOR_ID(_uninitialized_submodules) STRUCT_FOR_ID(_warn_unawaited_coroutine) STRUCT_FOR_ID(_xoptions) @@ -234,6 +257,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(abs_tol) STRUCT_FOR_ID(access) STRUCT_FOR_ID(add) + STRUCT_FOR_ID(add_done_callback) STRUCT_FOR_ID(after_in_child) STRUCT_FOR_ID(after_in_parent) STRUCT_FOR_ID(aggregate_class) @@ -241,8 +265,11 @@ struct _Py_global_strings { STRUCT_FOR_ID(argdefs) STRUCT_FOR_ID(arguments) STRUCT_FOR_ID(argv) + STRUCT_FOR_ID(as_integer_ratio) + STRUCT_FOR_ID(ast) STRUCT_FOR_ID(attribute) STRUCT_FOR_ID(authorizer_callback) + STRUCT_FOR_ID(autocommit) STRUCT_FOR_ID(b) STRUCT_FOR_ID(backtick) STRUCT_FOR_ID(base) @@ -260,6 +287,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(byteorder) STRUCT_FOR_ID(bytes) STRUCT_FOR_ID(bytes_per_sep) + STRUCT_FOR_ID(c) STRUCT_FOR_ID(c_call) STRUCT_FOR_ID(c_exception) STRUCT_FOR_ID(c_return) @@ -267,6 +295,9 @@ struct _Py_global_strings { STRUCT_FOR_ID(cadata) STRUCT_FOR_ID(cafile) STRUCT_FOR_ID(call) + STRUCT_FOR_ID(call_exception_handler) + STRUCT_FOR_ID(call_soon) + STRUCT_FOR_ID(cancel) STRUCT_FOR_ID(capath) STRUCT_FOR_ID(category) 
STRUCT_FOR_ID(cb_type) @@ -306,6 +337,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(coro) STRUCT_FOR_ID(count) STRUCT_FOR_ID(cwd) + STRUCT_FOR_ID(d) STRUCT_FOR_ID(data) STRUCT_FOR_ID(database) STRUCT_FOR_ID(decode) @@ -324,6 +356,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(digest_size) STRUCT_FOR_ID(digestmod) STRUCT_FOR_ID(dir_fd) + STRUCT_FOR_ID(discard) STRUCT_FOR_ID(dispatch_table) STRUCT_FOR_ID(displayhook) STRUCT_FOR_ID(dklen) @@ -332,6 +365,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(dst) STRUCT_FOR_ID(dst_dir_fd) STRUCT_FOR_ID(duration) + STRUCT_FOR_ID(e) STRUCT_FOR_ID(effective_ids) STRUCT_FOR_ID(element_factory) STRUCT_FOR_ID(encode) @@ -352,6 +386,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(extend) STRUCT_FOR_ID(facility) STRUCT_FOR_ID(factory) + STRUCT_FOR_ID(false) STRUCT_FOR_ID(family) STRUCT_FOR_ID(fanout) STRUCT_FOR_ID(fd) @@ -373,12 +408,19 @@ struct _Py_global_strings { STRUCT_FOR_ID(follow_symlinks) STRUCT_FOR_ID(format) STRUCT_FOR_ID(frequency) + STRUCT_FOR_ID(from_param) STRUCT_FOR_ID(fromlist) + STRUCT_FOR_ID(fromtimestamp) + STRUCT_FOR_ID(fromutc) STRUCT_FOR_ID(fset) STRUCT_FOR_ID(func) + STRUCT_FOR_ID(future) STRUCT_FOR_ID(generation) STRUCT_FOR_ID(genexpr) STRUCT_FOR_ID(get) + STRUCT_FOR_ID(get_debug) + STRUCT_FOR_ID(get_event_loop) + STRUCT_FOR_ID(get_loop) STRUCT_FOR_ID(get_source) STRUCT_FOR_ID(getattr) STRUCT_FOR_ID(getstate) @@ -415,6 +457,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(intersection) STRUCT_FOR_ID(isatty) STRUCT_FOR_ID(isinstance) + STRUCT_FOR_ID(isoformat) STRUCT_FOR_ID(isolation_level) STRUCT_FOR_ID(istext) STRUCT_FOR_ID(item) @@ -429,6 +472,9 @@ struct _Py_global_strings { STRUCT_FOR_ID(keyfile) STRUCT_FOR_ID(keys) STRUCT_FOR_ID(kind) + STRUCT_FOR_ID(kw) + STRUCT_FOR_ID(kw1) + STRUCT_FOR_ID(kw2) STRUCT_FOR_ID(lambda) STRUCT_FOR_ID(last) STRUCT_FOR_ID(last_node) @@ -491,6 +537,8 @@ struct _Py_global_strings { STRUCT_FOR_ID(node_offset) STRUCT_FOR_ID(ns) STRUCT_FOR_ID(nstype) + STRUCT_FOR_ID(nt) + STRUCT_FOR_ID(null) STRUCT_FOR_ID(number) STRUCT_FOR_ID(obj) STRUCT_FOR_ID(object) @@ -526,6 +574,9 @@ struct _Py_global_strings { STRUCT_FOR_ID(pid) STRUCT_FOR_ID(policy) STRUCT_FOR_ID(pos) + STRUCT_FOR_ID(pos1) + STRUCT_FOR_ID(pos2) + STRUCT_FOR_ID(posix) STRUCT_FOR_ID(print_file_and_line) STRUCT_FOR_ID(priority) STRUCT_FOR_ID(progress) @@ -566,6 +617,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(seek) STRUCT_FOR_ID(seekable) STRUCT_FOR_ID(selectors) + STRUCT_FOR_ID(self) STRUCT_FOR_ID(send) STRUCT_FOR_ID(sep) STRUCT_FOR_ID(sequence) @@ -588,6 +640,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(sort) STRUCT_FOR_ID(sound) STRUCT_FOR_ID(source) + STRUCT_FOR_ID(source_traceback) STRUCT_FOR_ID(src) STRUCT_FOR_ID(src_dir_fd) STRUCT_FOR_ID(stacklevel) @@ -600,6 +653,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(step) STRUCT_FOR_ID(store_name) STRUCT_FOR_ID(strategy) + STRUCT_FOR_ID(strftime) STRUCT_FOR_ID(strict) STRUCT_FOR_ID(strict_mode) STRUCT_FOR_ID(string) @@ -622,16 +676,19 @@ struct _Py_global_strings { STRUCT_FOR_ID(throw) STRUCT_FOR_ID(timeout) STRUCT_FOR_ID(times) + STRUCT_FOR_ID(timetuple) STRUCT_FOR_ID(top) STRUCT_FOR_ID(trace_callback) STRUCT_FOR_ID(traceback) STRUCT_FOR_ID(trailers) STRUCT_FOR_ID(translate) + STRUCT_FOR_ID(true) STRUCT_FOR_ID(truncate) STRUCT_FOR_ID(twice) STRUCT_FOR_ID(txt) STRUCT_FOR_ID(type) STRUCT_FOR_ID(tz) + STRUCT_FOR_ID(tzname) STRUCT_FOR_ID(uid) STRUCT_FOR_ID(unlink) STRUCT_FOR_ID(unraisablehook) @@ -671,9 +728,9 @@ struct _Py_global_strings { #define _Py_ID(NAME) \ - 
(_Py_SINGLETON(strings.identifiers._ ## NAME._ascii.ob_base)) + (_Py_SINGLETON(strings.identifiers._py_ ## NAME._ascii.ob_base)) #define _Py_STR(NAME) \ - (_Py_SINGLETON(strings.literals._ ## NAME._ascii.ob_base)) + (_Py_SINGLETON(strings.literals._py_ ## NAME._ascii.ob_base)) /* _Py_DECLARE_STR() should precede all uses of _Py_STR() in a function. diff --git a/Include/internal/pycore_hamt.h b/Include/internal/pycore_hamt.h index 7a674b4ee4ac0e..d8742c7cb63578 100644 --- a/Include/internal/pycore_hamt.h +++ b/Include/internal/pycore_hamt.h @@ -28,10 +28,6 @@ extern PyTypeObject _PyHamtKeys_Type; extern PyTypeObject _PyHamtValues_Type; extern PyTypeObject _PyHamtItems_Type; -/* runtime lifecycle */ - -void _PyHamt_Fini(PyInterpreterState *); - /* other API */ @@ -53,6 +49,13 @@ typedef struct { } PyHamtObject; +typedef struct { + PyObject_VAR_HEAD + uint32_t b_bitmap; + PyObject *b_array[1]; +} PyHamtNode_Bitmap; + + /* A struct to hold the state of depth-first traverse of the tree. HAMT is an immutable collection. Iterators will hold a strong reference diff --git a/Include/internal/pycore_hashtable.h b/Include/internal/pycore_hashtable.h index 2aa23a24c2830a..6501ab14d27684 100644 --- a/Include/internal/pycore_hashtable.h +++ b/Include/internal/pycore_hashtable.h @@ -18,9 +18,9 @@ typedef struct { _Py_slist_item_t *head; } _Py_slist_t; -#define _Py_SLIST_ITEM_NEXT(ITEM) (((_Py_slist_item_t *)(ITEM))->next) +#define _Py_SLIST_ITEM_NEXT(ITEM) _Py_RVALUE(((_Py_slist_item_t *)(ITEM))->next) -#define _Py_SLIST_HEAD(SLIST) (((_Py_slist_t *)(SLIST))->head) +#define _Py_SLIST_HEAD(SLIST) _Py_RVALUE(((_Py_slist_t *)(SLIST))->head) /* _Py_hashtable: table entry */ diff --git a/Include/internal/pycore_import.h b/Include/internal/pycore_import.h index aee1f66a3ea171..9036dff6725330 100644 --- a/Include/internal/pycore_import.h +++ b/Include/internal/pycore_import.h @@ -5,6 +5,38 @@ extern "C" { #endif + +struct _import_runtime_state { + /* The builtin modules (defined in config.c). */ + struct _inittab *inittab; + /* The most recent value assigned to a PyModuleDef.m_base.m_index. + This is incremented each time PyModuleDef_Init() is called, + which is just about every time an extension module is imported. + See PyInterpreterState.modules_by_index for more info. */ + Py_ssize_t last_module_index; + /* A dict mapping (filename, name) to PyModuleDef for modules. + Only legacy (single-phase init) extension modules are added + and only if they support multiple initialization (m_size >= 0) + or are imported in the main interpreter. + This is initialized lazily in _PyImport_FixupExtensionObject(). + Modules are added there and looked up in _imp.find_extension(). */ + PyObject *extensions; + /* The global import lock.
*/ + struct { + PyThread_type_lock mutex; + unsigned long thread; + int level; + } lock; + struct { + int import_level; + _PyTime_t accumulated; + int header; + } find_and_load; + /* Package context -- the full module name for package imports */ + const char * pkgcontext; +}; + + #ifdef HAVE_FORK extern PyStatus _PyImport_ReInitLock(void); #endif diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 6f8fbeb1ba3e31..0e3d46852f2e6d 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -12,48 +12,22 @@ extern "C" { #include "pycore_atomic.h" // _Py_atomic_address #include "pycore_ast_state.h" // struct ast_state +#include "pycore_ceval_state.h" // struct _ceval_state #include "pycore_code.h" // struct callable_cache #include "pycore_context.h" // struct _Py_context_state -#include "pycore_dict.h" // struct _Py_dict_state +#include "pycore_dict_state.h" // struct _Py_dict_state #include "pycore_exceptions.h" // struct _Py_exc_state #include "pycore_floatobject.h" // struct _Py_float_state +#include "pycore_function.h" // FUNC_MAX_WATCHERS #include "pycore_genobject.h" // struct _Py_async_gen_state #include "pycore_gc.h" // struct _gc_runtime_state #include "pycore_list.h" // struct _Py_list_state +#include "pycore_global_objects.h" // struct _Py_interp_static_objects #include "pycore_tuple.h" // struct _Py_tuple_state #include "pycore_typeobject.h" // struct type_cache #include "pycore_unicodeobject.h" // struct _Py_unicode_state #include "pycore_warnings.h" // struct _warnings_runtime_state -struct _pending_calls { - PyThread_type_lock lock; - /* Request for running pending calls. */ - _Py_atomic_int calls_to_do; - /* Request for looking at the `async_exc` field of the current - thread state. - Guarded by the GIL. */ - int async_exc; -#define NPENDINGCALLS 32 - struct { - int (*func)(void *); - void *arg; - } calls[NPENDINGCALLS]; - int first; - int last; -}; - -struct _ceval_state { - int recursion_limit; - /* This single variable consolidates all requests to break out of - the fast path in the eval loop. */ - _Py_atomic_int eval_breaker; - /* Request for dropping the GIL */ - _Py_atomic_int gil_drop_request; - /* The GC is ready to be executed */ - _Py_atomic_int gc_scheduled; - struct _pending_calls pending; -}; - // atexit state typedef struct { @@ -115,14 +89,30 @@ struct _is { int _initialized; int finalizing; - /* Was this interpreter statically allocated? */ - bool _static; - struct _ceval_state ceval; struct _gc_runtime_state gc; // sys.modules dictionary PyObject *modules; + /* This is the list of module objects for all legacy (single-phase init) + extension modules ever loaded in this process (i.e. imported + in this interpreter or in any other). Py_None stands in for + modules that haven't actually been imported in this interpreter. + + A module's index (PyModuleDef.m_base.m_index) is used to look up + the corresponding module object for this interpreter, if any. + (See PyState_FindModule().) When any extension module + is initialized during import, its moduledef gets initialized by + PyModuleDef_Init(), and the first time that happens for each + PyModuleDef, its index gets set to the current value of + a global counter (see _PyRuntimeState.imports.last_module_index). + The entry for that index in this interpreter remains unset until + the module is actually imported here. (Py_None is used as + a placeholder.) Note that multi-phase init modules always get + an index for which there will never be a module set. 
+ + This is initialized lazily in _PyState_AddModule(), which is also + where modules get added. */ PyObject *modules_by_index; // Dictionary of the sys module PyObject *sysdict; @@ -152,7 +142,9 @@ struct _is { // Initialized to _PyEval_EvalFrameDefault(). _PyFrameEvalFunction eval_frame; - PyDict_WatchCallback dict_watchers[DICT_MAX_WATCHERS]; + PyFunction_WatchCallback func_watchers[FUNC_MAX_WATCHERS]; + // One bit is set for each non-NULL entry in func_watchers + uint8_t active_func_watchers; Py_ssize_t co_extra_user_count; freefunc co_extra_freefuncs[MAX_CO_EXTRA_USERS]; @@ -168,6 +160,9 @@ struct _is { PyObject *audit_hooks; PyType_WatchCallback type_watchers[TYPE_MAX_WATCHERS]; + PyCode_WatchCallback code_watchers[CODE_MAX_WATCHERS]; + // One bit is set for each non-NULL entry in code_watchers + uint8_t active_code_watchers; struct _Py_unicode_state unicode; struct _Py_float_state float_state; @@ -186,6 +181,10 @@ struct _is { struct ast_state ast; struct types_state types; struct callable_cache callable_cache; + PyCodeObject *interpreter_trampoline; + + struct _Py_interp_cached_objects cached_objects; + struct _Py_interp_static_objects static_objects; /* The following fields are here to avoid allocation during init. The data is exposed through PyInterpreterState pointer fields. @@ -219,9 +218,10 @@ extern void _PyInterpreterState_Clear(PyThreadState *tstate); struct _xidregitem; struct _xidregitem { - PyTypeObject *cls; - crossinterpdatafunc getdata; + struct _xidregitem *prev; struct _xidregitem *next; + PyObject *cls; // weakref to a PyTypeObject + crossinterpdatafunc getdata; }; PyAPI_FUNC(PyInterpreterState*) _PyInterpreterState_LookUpID(int64_t); diff --git a/Include/internal/pycore_list.h b/Include/internal/pycore_list.h index 691d13bc8d9ffa..628267cc8a9618 100644 --- a/Include/internal/pycore_list.h +++ b/Include/internal/pycore_list.h @@ -35,7 +35,7 @@ struct _Py_list_state { #endif }; -#define _PyList_ITEMS(op) (_PyList_CAST(op)->ob_item) +#define _PyList_ITEMS(op) _Py_RVALUE(_PyList_CAST(op)->ob_item) extern int _PyList_AppendTakeRefListResize(PyListObject *self, PyObject *newitem); diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h index 8b78f79e950e92..8796dfe2f6b8cf 100644 --- a/Include/internal/pycore_object.h +++ b/Include/internal/pycore_object.h @@ -355,8 +355,9 @@ extern int _PyType_HasSubclasses(PyTypeObject *); extern PyObject* _PyType_GetSubclasses(PyTypeObject *); // Access macro to the members which are floating "behind" the object -#define _PyHeapType_GET_MEMBERS(etype) \ - ((PyMemberDef *)(((char *)(etype)) + Py_TYPE(etype)->tp_basicsize)) +static inline PyMemberDef* _PyHeapType_GET_MEMBERS(PyHeapTypeObject *etype) { + return (PyMemberDef*)((char*)etype + Py_TYPE(etype)->tp_basicsize); +} PyAPI_FUNC(PyObject *) _PyObject_LookupSpecial(PyObject *, PyObject *); @@ -372,7 +373,7 @@ PyAPI_FUNC(PyObject *) _PyObject_LookupSpecial(PyObject *, PyObject *); * match. * * Third party code unintentionally rely on problematic fpcasts. The call - * trampoline mitigates common occurences of bad fpcasts on Emscripten. + * trampoline mitigates common occurrences of bad fpcasts on Emscripten. 
*/ #if defined(__EMSCRIPTEN__) && defined(PY_CALL_TRAMPOLINE) #define _PyCFunction_TrampolineCall(meth, self, args) \ diff --git a/Include/internal/pycore_obmalloc.h b/Include/internal/pycore_obmalloc.h new file mode 100644 index 00000000000000..a5c7f4528f9126 --- /dev/null +++ b/Include/internal/pycore_obmalloc.h @@ -0,0 +1,690 @@ +#ifndef Py_INTERNAL_OBMALLOC_H +#define Py_INTERNAL_OBMALLOC_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + + +typedef unsigned int pymem_uint; /* assuming >= 16 bits */ + +#undef uint +#define uint pymem_uint + + +/* An object allocator for Python. + + Here is an introduction to the layers of the Python memory architecture, + showing where the object allocator is actually used (layer +2), It is + called for every object allocation and deallocation (PyObject_New/Del), + unless the object-specific allocators implement a proprietary allocation + scheme (ex.: ints use a simple free list). This is also the place where + the cyclic garbage collector operates selectively on container objects. + + + Object-specific allocators + _____ ______ ______ ________ + [ int ] [ dict ] [ list ] ... [ string ] Python core | ++3 | <----- Object-specific memory -----> | <-- Non-object memory --> | + _______________________________ | | + [ Python's object allocator ] | | ++2 | ####### Object memory ####### | <------ Internal buffers ------> | + ______________________________________________________________ | + [ Python's raw memory allocator (PyMem_ API) ] | ++1 | <----- Python memory (under PyMem manager's control) ------> | | + __________________________________________________________________ + [ Underlying general-purpose allocator (ex: C library malloc) ] + 0 | <------ Virtual memory allocated for the python process -------> | + + ========================================================================= + _______________________________________________________________________ + [ OS-specific Virtual Memory Manager (VMM) ] +-1 | <--- Kernel dynamic storage allocation & management (page-based) ---> | + __________________________________ __________________________________ + [ ] [ ] +-2 | <-- Physical memory: ROM/RAM --> | | <-- Secondary storage (swap) --> | + +*/ +/*==========================================================================*/ + +/* A fast, special-purpose memory allocator for small blocks, to be used + on top of a general-purpose malloc -- heavily based on previous art. */ + +/* Vladimir Marangozov -- August 2000 */ + +/* + * "Memory management is where the rubber meets the road -- if we do the wrong + * thing at any level, the results will not be good. And if we don't make the + * levels work well together, we are in serious trouble." (1) + * + * (1) Paul R. Wilson, Mark S. Johnstone, Michael Neely, and David Boles, + * "Dynamic Storage Allocation: A Survey and Critical Review", + * in Proc. 1995 Int'l. Workshop on Memory Management, September 1995. + */ + +/* #undef WITH_MEMORY_LIMITS */ /* disable mem limit checks */ + +/*==========================================================================*/ + +/* + * Allocation strategy abstract: + * + * For small requests, the allocator sub-allocates <Big> blocks of memory. + * Requests greater than SMALL_REQUEST_THRESHOLD bytes are routed to the + * system's allocator. + * + * Small requests are grouped in size classes spaced 8 bytes apart, due + * to the required valid alignment of the returned address. 
Requests of + * a particular size are serviced from memory pools of 4K (one VMM page). + * Pools are fragmented on demand and contain free lists of blocks of one + * particular size class. In other words, there is a fixed-size allocator + * for each size class. Free pools are shared by the different allocators + * thus minimizing the space reserved for a particular size class. + * + * This allocation strategy is a variant of what is known as "simple + * segregated storage based on array of free lists". The main drawback of + * simple segregated storage is that we might end up with lot of reserved + * memory for the different free lists, which degenerate in time. To avoid + * this, we partition each free list in pools and we share dynamically the + * reserved space between all free lists. This technique is quite efficient + * for memory intensive programs which allocate mainly small-sized blocks. + * + * For small requests we have the following table: + * + * Request in bytes Size of allocated block Size class idx + * ---------------------------------------------------------------- + * 1-8 8 0 + * 9-16 16 1 + * 17-24 24 2 + * 25-32 32 3 + * 33-40 40 4 + * 41-48 48 5 + * 49-56 56 6 + * 57-64 64 7 + * 65-72 72 8 + * ... ... ... + * 497-504 504 62 + * 505-512 512 63 + * + * 0, SMALL_REQUEST_THRESHOLD + 1 and up: routed to the underlying + * allocator. + */ + +/*==========================================================================*/ + +/* + * -- Main tunable settings section -- + */ + +/* + * Alignment of addresses returned to the user. 8-bytes alignment works + * on most current architectures (with 32-bit or 64-bit address buses). + * The alignment value is also used for grouping small requests in size + * classes spaced ALIGNMENT bytes apart. + * + * You shouldn't change this unless you know what you are doing. + */ + +#if SIZEOF_VOID_P > 4 +#define ALIGNMENT 16 /* must be 2^N */ +#define ALIGNMENT_SHIFT 4 +#else +#define ALIGNMENT 8 /* must be 2^N */ +#define ALIGNMENT_SHIFT 3 +#endif + +/* Return the number of bytes in size class I, as a uint. */ +#define INDEX2SIZE(I) (((pymem_uint)(I) + 1) << ALIGNMENT_SHIFT) + +/* + * Max size threshold below which malloc requests are considered to be + * small enough in order to use preallocated memory pools. You can tune + * this value according to your application behaviour and memory needs. + * + * Note: a size threshold of 512 guarantees that newly created dictionaries + * will be allocated from preallocated memory pools on 64-bit. + * + * The following invariants must hold: + * 1) ALIGNMENT <= SMALL_REQUEST_THRESHOLD <= 512 + * 2) SMALL_REQUEST_THRESHOLD is evenly divisible by ALIGNMENT + * + * Although not required, for better performance and space efficiency, + * it is recommended that SMALL_REQUEST_THRESHOLD is set to a power of 2. + */ +#define SMALL_REQUEST_THRESHOLD 512 +#define NB_SMALL_SIZE_CLASSES (SMALL_REQUEST_THRESHOLD / ALIGNMENT) + +/* + * The system's VMM page size can be obtained on most unices with a + * getpagesize() call or deduced from various header files. To make + * things simpler, we assume that it is 4K, which is OK for most systems. + * It is probably better if this is the native page size, but it doesn't + * have to be. In theory, if SYSTEM_PAGE_SIZE is larger than the native page + * size, then `POOL_ADDR(p)->arenaindex' could rarely cause a segmentation + * violation fault. 4K is apparently OK for all the platforms that python + * currently targets. 
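To make the table above concrete, here is a small self-contained C sketch of the request-to-size-class mapping it describes, using the 8-byte spacing shown in the table (i.e. ALIGNMENT_SHIFT == 3, the 32-bit case); size_class_index() is a hypothetical helper for illustration, not something defined in this header.

#include <assert.h>
#include <stddef.h>

/* Illustrative only: 8-byte size classes, matching the table above. */
#define ALIGNMENT_SHIFT 3
#define INDEX2SIZE(I) (((size_t)(I) + 1) << ALIGNMENT_SHIFT)

/* Hypothetical helper: size class index for a small request (1..512 bytes). */
static size_t size_class_index(size_t nbytes) {
    return (nbytes - 1) >> ALIGNMENT_SHIFT;
}

int main(void) {
    assert(size_class_index(1) == 0   && INDEX2SIZE(0) == 8);     /* 1-8     -> class 0  */
    assert(size_class_index(9) == 1   && INDEX2SIZE(1) == 16);    /* 9-16    -> class 1  */
    assert(size_class_index(512) == 63 && INDEX2SIZE(63) == 512); /* 505-512 -> class 63 */
    return 0;
}

Requests above SMALL_REQUEST_THRESHOLD (and zero-byte requests) bypass this mapping entirely and are routed to the underlying allocator, as noted above.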
+ */ +#define SYSTEM_PAGE_SIZE (4 * 1024) + +/* + * Maximum amount of memory managed by the allocator for small requests. + */ +#ifdef WITH_MEMORY_LIMITS +#ifndef SMALL_MEMORY_LIMIT +#define SMALL_MEMORY_LIMIT (64 * 1024 * 1024) /* 64 MB -- more? */ +#endif +#endif + +#if !defined(WITH_PYMALLOC_RADIX_TREE) +/* Use radix-tree to track arena memory regions, for address_in_range(). + * Enable by default since it allows larger pool sizes. Can be disabled + * using -DWITH_PYMALLOC_RADIX_TREE=0 */ +#define WITH_PYMALLOC_RADIX_TREE 1 +#endif + +#if SIZEOF_VOID_P > 4 +/* on 64-bit platforms use larger pools and arenas if we can */ +#define USE_LARGE_ARENAS +#if WITH_PYMALLOC_RADIX_TREE +/* large pools only supported if radix-tree is enabled */ +#define USE_LARGE_POOLS +#endif +#endif + +/* + * The allocator sub-allocates <Big> blocks of memory (called arenas) aligned + * on a page boundary. This is a reserved virtual address space for the + * current process (obtained through a malloc()/mmap() call). In no way this + * means that the memory arenas will be used entirely. A malloc(<Big>) is + * usually an address range reservation for <Big> bytes, unless all pages within + * this space are referenced subsequently. So malloc'ing big blocks and not + * using them does not mean "wasting memory". It's an addressable range + * wastage... + * + * Arenas are allocated with mmap() on systems supporting anonymous memory + * mappings to reduce heap fragmentation. + */ +#ifdef USE_LARGE_ARENAS +#define ARENA_BITS 20 /* 1 MiB */ +#else +#define ARENA_BITS 18 /* 256 KiB */ +#endif +#define ARENA_SIZE (1 << ARENA_BITS) +#define ARENA_SIZE_MASK (ARENA_SIZE - 1) + +#ifdef WITH_MEMORY_LIMITS +#define MAX_ARENAS (SMALL_MEMORY_LIMIT / ARENA_SIZE) +#endif + +/* + * Size of the pools used for small blocks. Must be a power of 2. + */ +#ifdef USE_LARGE_POOLS +#define POOL_BITS 14 /* 16 KiB */ +#else +#define POOL_BITS 12 /* 4 KiB */ +#endif +#define POOL_SIZE (1 << POOL_BITS) +#define POOL_SIZE_MASK (POOL_SIZE - 1) + +#if !WITH_PYMALLOC_RADIX_TREE +#if POOL_SIZE != SYSTEM_PAGE_SIZE +# error "pool size must be equal to system page size" +#endif +#endif + +#define MAX_POOLS_IN_ARENA (ARENA_SIZE / POOL_SIZE) +#if MAX_POOLS_IN_ARENA * POOL_SIZE != ARENA_SIZE +# error "arena size not an exact multiple of pool size" +#endif + +/* + * -- End of tunable settings section -- + */ + +/*==========================================================================*/ + +/* When you say memory, my mind reasons in terms of (pointers to) blocks */ +typedef uint8_t pymem_block; + +/* Pool for small blocks. */ +struct pool_header { + union { pymem_block *_padding; + uint count; } ref; /* number of allocated blocks */ + pymem_block *freeblock; /* pool's free list head */ + struct pool_header *nextpool; /* next pool of this size class */ + struct pool_header *prevpool; /* previous pool "" */ + uint arenaindex; /* index into arenas of base adr */ + uint szidx; /* block size class index */ + uint nextoffset; /* bytes to virgin block */ + uint maxnextoffset; /* largest valid nextoffset */ +}; + +typedef struct pool_header *poolp; + +/* Record keeping for arenas. */ +struct arena_object { + /* The address of the arena, as returned by malloc. Note that 0 + * will never be returned by a successful malloc, and is used + * here to mark an arena_object that doesn't correspond to an + * allocated arena. + */ + uintptr_t address; + + /* Pool-aligned pointer to the next pool to be carved off. 
*/ + pymem_block* pool_address; + + /* The number of available pools in the arena: free pools + never- + * allocated pools. + */ + uint nfreepools; + + /* The total number of pools in the arena, whether or not available. */ + uint ntotalpools; + + /* Singly-linked list of available pools. */ + struct pool_header* freepools; + + /* Whenever this arena_object is not associated with an allocated + * arena, the nextarena member is used to link all unassociated + * arena_objects in the singly-linked `unused_arena_objects` list. + * The prevarena member is unused in this case. + * + * When this arena_object is associated with an allocated arena + * with at least one available pool, both members are used in the + * doubly-linked `usable_arenas` list, which is maintained in + * increasing order of `nfreepools` values. + * + * Else this arena_object is associated with an allocated arena + * all of whose pools are in use. `nextarena` and `prevarena` + * are both meaningless in this case. + */ + struct arena_object* nextarena; + struct arena_object* prevarena; +}; + +#define POOL_OVERHEAD _Py_SIZE_ROUND_UP(sizeof(struct pool_header), ALIGNMENT) + +#define DUMMY_SIZE_IDX 0xffff /* size class of newly cached pools */ + +/* Round pointer P down to the closest pool-aligned address <= P, as a poolp */ +#define POOL_ADDR(P) ((poolp)_Py_ALIGN_DOWN((P), POOL_SIZE)) + +/* Return total number of blocks in pool of size index I, as a uint. */ +#define NUMBLOCKS(I) ((pymem_uint)(POOL_SIZE - POOL_OVERHEAD) / INDEX2SIZE(I)) + +/*==========================================================================*/ + +/* + * Pool table -- headed, circular, doubly-linked lists of partially used pools. + +This is involved. For an index i, usedpools[i+i] is the header for a list of +all partially used pools holding small blocks with "size class idx" i. So +usedpools[0] corresponds to blocks of size 8, usedpools[2] to blocks of size +16, and so on: index 2*i <-> blocks of size (i+1)<<ALIGNMENT_SHIFT. + +Pools are carved off an arena's highwater mark (an arena_object's pool_address +member) as needed. Once carved off, a pool is in one of three states forever +after: + +used == partially used, neither empty nor full + At least one block in the pool is currently allocated, and at least one + block in the pool is not currently allocated (note this implies a pool + has room for at least two blocks). + This is a pool's initial state, as a pool is created only when malloc + needs space. + The pool holds blocks of a fixed size, and is in the circular list headed + at usedpools[i] (see above). It's linked to the other used pools of the + same size class via the pool_header's nextpool and prevpool members. + If all but one block is currently allocated, a malloc can cause a + transition to the full state. If all but one block is not currently + allocated, a free can cause a transition to the empty state. + +full == all the pool's blocks are currently allocated + On transition to full, a pool is unlinked from its usedpools[] list. + It's not linked to from anything then anymore, and its nextpool and + prevpool members are meaningless until it transitions back to used. + A free of a block in a full pool puts the pool back in the used state. + Then it's linked in at the front of the appropriate usedpools[] list, so + that the next allocation for its size class will reuse the freed block. 
+ +empty == all the pool's blocks are currently available for allocation + On transition to empty, a pool is unlinked from its usedpools[] list, + and linked to the front of its arena_object's singly-linked freepools list, + via its nextpool member. The prevpool member has no meaning in this case. + Empty pools have no inherent size class: the next time a malloc finds + an empty list in usedpools[], it takes the first pool off of freepools. + If the size class needed happens to be the same as the size class the pool + last had, some pool initialization can be skipped. + + +Block Management + +Blocks within pools are again carved out as needed. pool->freeblock points to +the start of a singly-linked list of free blocks within the pool. When a +block is freed, it's inserted at the front of its pool's freeblock list. Note +that the available blocks in a pool are *not* linked all together when a pool +is initialized. Instead only "the first two" (lowest addresses) blocks are +set up, returning the first such block, and setting pool->freeblock to a +one-block list holding the second such block. This is consistent with that +pymalloc strives at all levels (arena, pool, and block) never to touch a piece +of memory until it's actually needed. + +So long as a pool is in the used state, we're certain there *is* a block +available for allocating, and pool->freeblock is not NULL. If pool->freeblock +points to the end of the free list before we've carved the entire pool into +blocks, that means we simply haven't yet gotten to one of the higher-address +blocks. The offset from the pool_header to the start of "the next" virgin +block is stored in the pool_header nextoffset member, and the largest value +of nextoffset that makes sense is stored in the maxnextoffset member when a +pool is initialized. All the blocks in a pool have been passed out at least +once when and only when nextoffset > maxnextoffset. + + +Major obscurity: While the usedpools vector is declared to have poolp +entries, it doesn't really. It really contains two pointers per (conceptual) +poolp entry, the nextpool and prevpool members of a pool_header. The +excruciating initialization code below fools C so that + + usedpool[i+i] + +"acts like" a genuine poolp, but only so long as you only reference its +nextpool and prevpool members. The "- 2*sizeof(pymem_block *)" gibberish is +compensating for that a pool_header's nextpool and prevpool members +immediately follow a pool_header's first two members: + + union { pymem_block *_padding; + uint count; } ref; + pymem_block *freeblock; + +each of which consume sizeof(pymem_block *) bytes. So what usedpools[i+i] really +contains is a fudged-up pointer p such that *if* C believes it's a poolp +pointer, then p->nextpool and p->prevpool are both p (meaning that the headed +circular list is empty). + +It's unclear why the usedpools setup is so convoluted. It could be to +minimize the amount of cache required to hold this heavily-referenced table +(which only *needs* the two interpool pointer members of a pool_header). OTOH, +referencing code has to remember to "double the index" and doing so isn't +free, usedpools[0] isn't a strictly legal pointer, and we're crucially relying +on that C doesn't insert any padding anywhere in a pool_header at or before +the prevpool member. 
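The "fudged-up pointer" described above is easier to see in a standalone sketch. The following is illustrative only: pool_header is cut down to the members quoted above, pools is a toy stand-in for _obmalloc_pools with a single size class, and PTA() is a simplified copy of the initializer macro that appears in pycore_obmalloc_init.h further down in this patch.

#include <stdint.h>
#include <stdio.h>
#include <stddef.h>

typedef uint8_t pymem_block;

/* Mirror of the first members of struct pool_header declared above. */
struct pool_header {
    union { pymem_block *_padding; unsigned int count; } ref;
    pymem_block *freeblock;
    struct pool_header *nextpool;
    struct pool_header *prevpool;
};
typedef struct pool_header *poolp;

/* Toy stand-in for _obmalloc_pools with one size class.  The two leading
 * pointers keep the "backed up" header address inside this struct. */
static struct {
    pymem_block *pad0;
    pymem_block *pad1;
    poolp used[2];
} pools;

/* Simplified PTA(): back up by two pointer-sized slots so that ->nextpool
 * and ->prevpool alias used[2*x] and used[2*x + 1]. */
#define PTA(x) ((poolp)((uint8_t *)&pools.used[2*(x)] - 2*sizeof(pymem_block *)))

int main(void) {
    pools.used[0] = PTA(0);   /* what the _obmalloc_pools_INIT macros set up */
    pools.used[1] = PTA(0);
    poolp head = PTA(0);
    /* An empty circular list: the fake header points at itself. */
    printf("empty list: %d\n", head->nextpool == head && head->prevpool == head);
    printf("nextpool offset = %zu, 2*sizeof(pymem_block*) = %zu\n",
           offsetof(struct pool_header, nextpool), 2 * sizeof(pymem_block *));
    return 0;
}

The PTA()/PT_8() initializers below build this self-referential table, one pair of slots per size class, which is why usedpools[i+i] "acts like" an empty headed circular list before any pool has been carved off.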
+**************************************************************************** */ + +#define OBMALLOC_USED_POOLS_SIZE (2 * ((NB_SMALL_SIZE_CLASSES + 7) / 8) * 8) + +struct _obmalloc_pools { + poolp used[OBMALLOC_USED_POOLS_SIZE]; +}; + + +/*========================================================================== +Arena management. + +`arenas` is a vector of arena_objects. It contains maxarenas entries, some of +which may not be currently used (== they're arena_objects that aren't +currently associated with an allocated arena). Note that arenas proper are +separately malloc'ed. + +Prior to Python 2.5, arenas were never free()'ed. Starting with Python 2.5, +we do try to free() arenas, and use some mild heuristic strategies to increase +the likelihood that arenas eventually can be freed. + +unused_arena_objects + + This is a singly-linked list of the arena_objects that are currently not + being used (no arena is associated with them). Objects are taken off the + head of the list in new_arena(), and are pushed on the head of the list in + PyObject_Free() when the arena is empty. Key invariant: an arena_object + is on this list if and only if its .address member is 0. + +usable_arenas + + This is a doubly-linked list of the arena_objects associated with arenas + that have pools available. These pools are either waiting to be reused, + or have not been used before. The list is sorted to have the most- + allocated arenas first (ascending order based on the nfreepools member). + This means that the next allocation will come from a heavily used arena, + which gives the nearly empty arenas a chance to be returned to the system. + In my unscientific tests this dramatically improved the number of arenas + that could be freed. + +Note that an arena_object associated with an arena all of whose pools are +currently in use isn't on either list. + +Changed in Python 3.8: keeping usable_arenas sorted by number of free pools +used to be done by one-at-a-time linear search when an arena's number of +free pools changed. That could, overall, consume time quadratic in the +number of arenas. That didn't really matter when there were only a few +hundred arenas (typical!), but could be a timing disaster when there were +hundreds of thousands. See bpo-37029. + +Now we have a vector of "search fingers" to eliminate the need to search: +nfp2lasta[nfp] returns the last ("rightmost") arena in usable_arenas +with nfp free pools. This is NULL if and only if there is no arena with +nfp free pools in usable_arenas. +*/ + +/* How many arena_objects do we initially allocate? + * 16 = can allocate 16 arenas = 16 * ARENA_SIZE = 4MB before growing the + * `arenas` vector. + */ +#define INITIAL_ARENA_OBJECTS 16 + +struct _obmalloc_mgmt { + /* Array of objects used to track chunks of memory (arenas). */ + struct arena_object* arenas; + /* Number of slots currently allocated in the `arenas` vector. */ + uint maxarenas; + + /* The head of the singly-linked, NULL-terminated list of available + * arena_objects. + */ + struct arena_object* unused_arena_objects; + + /* The head of the doubly-linked, NULL-terminated at each end, list of + * arena_objects associated with arenas that have pools available. + */ + struct arena_object* usable_arenas; + + /* nfp2lasta[nfp] is the last arena in usable_arenas with nfp free pools */ + struct arena_object* nfp2lasta[MAX_POOLS_IN_ARENA + 1]; + + /* Number of arenas allocated that haven't been free()'d. 
*/ + size_t narenas_currently_allocated; + + /* Total number of times malloc() called to allocate an arena. */ + size_t ntimes_arena_allocated; + /* High water mark (max value ever seen) for narenas_currently_allocated. */ + size_t narenas_highwater; + + Py_ssize_t raw_allocated_blocks; +}; + + +#if WITH_PYMALLOC_RADIX_TREE +/*==========================================================================*/ +/* radix tree for tracking arena usage. If enabled, used to implement + address_in_range(). + + memory address bit allocation for keys + + 64-bit pointers, IGNORE_BITS=0 and 2^20 arena size: + 15 -> MAP_TOP_BITS + 15 -> MAP_MID_BITS + 14 -> MAP_BOT_BITS + 20 -> ideal aligned arena + ---- + 64 + + 64-bit pointers, IGNORE_BITS=16, and 2^20 arena size: + 16 -> IGNORE_BITS + 10 -> MAP_TOP_BITS + 10 -> MAP_MID_BITS + 8 -> MAP_BOT_BITS + 20 -> ideal aligned arena + ---- + 64 + + 32-bit pointers and 2^18 arena size: + 14 -> MAP_BOT_BITS + 18 -> ideal aligned arena + ---- + 32 + +*/ + +#if SIZEOF_VOID_P == 8 + +/* number of bits in a pointer */ +#define POINTER_BITS 64 + +/* High bits of memory addresses that will be ignored when indexing into the + * radix tree. Setting this to zero is the safe default. For most 64-bit + * machines, setting this to 16 would be safe. The kernel would not give + * user-space virtual memory addresses that have significant information in + * those high bits. The main advantage to setting IGNORE_BITS > 0 is that less + * virtual memory will be used for the top and middle radix tree arrays. Those + * arrays are allocated in the BSS segment and so will typically consume real + * memory only if actually accessed. + */ +#define IGNORE_BITS 0 + +/* use the top and mid layers of the radix tree */ +#define USE_INTERIOR_NODES + +#elif SIZEOF_VOID_P == 4 + +#define POINTER_BITS 32 +#define IGNORE_BITS 0 + +#else + + /* Currently this code works for 64-bit or 32-bit pointers only. */ +#error "obmalloc radix tree requires 64-bit or 32-bit pointers." + +#endif /* SIZEOF_VOID_P */ + +/* arena_coverage_t members require this to be true */ +#if ARENA_BITS >= 32 +# error "arena size must be < 2^32" +#endif + +/* the lower bits of the address that are not ignored */ +#define ADDRESS_BITS (POINTER_BITS - IGNORE_BITS) + +#ifdef USE_INTERIOR_NODES +/* number of bits used for MAP_TOP and MAP_MID nodes */ +#define INTERIOR_BITS ((ADDRESS_BITS - ARENA_BITS + 2) / 3) +#else +#define INTERIOR_BITS 0 +#endif + +#define MAP_TOP_BITS INTERIOR_BITS +#define MAP_TOP_LENGTH (1 << MAP_TOP_BITS) +#define MAP_TOP_MASK (MAP_TOP_LENGTH - 1) + +#define MAP_MID_BITS INTERIOR_BITS +#define MAP_MID_LENGTH (1 << MAP_MID_BITS) +#define MAP_MID_MASK (MAP_MID_LENGTH - 1) + +#define MAP_BOT_BITS (ADDRESS_BITS - ARENA_BITS - 2*INTERIOR_BITS) +#define MAP_BOT_LENGTH (1 << MAP_BOT_BITS) +#define MAP_BOT_MASK (MAP_BOT_LENGTH - 1) + +#define MAP_BOT_SHIFT ARENA_BITS +#define MAP_MID_SHIFT (MAP_BOT_BITS + MAP_BOT_SHIFT) +#define MAP_TOP_SHIFT (MAP_MID_BITS + MAP_MID_SHIFT) + +#define AS_UINT(p) ((uintptr_t)(p)) +#define MAP_BOT_INDEX(p) ((AS_UINT(p) >> MAP_BOT_SHIFT) & MAP_BOT_MASK) +#define MAP_MID_INDEX(p) ((AS_UINT(p) >> MAP_MID_SHIFT) & MAP_MID_MASK) +#define MAP_TOP_INDEX(p) ((AS_UINT(p) >> MAP_TOP_SHIFT) & MAP_TOP_MASK) + +#if IGNORE_BITS > 0 +/* Return the ignored part of the pointer address. Those bits should be same + * for all valid pointers if IGNORE_BITS is set correctly. 
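As a sanity check of the bit budget listed above, here is a small sketch of how the MAP_*_INDEX arithmetic splits an address, hard-coding the 64-bit / IGNORE_BITS=0 / 2^20-arena numbers from the first table; the example address and the MASK() helper are made up for illustration, and the sketch assumes a 64-bit build.

#include <stdint.h>
#include <stdio.h>

/* 64-bit, IGNORE_BITS == 0, ARENA_BITS == 20 (values from the table above). */
#define ARENA_BITS     20
#define MAP_TOP_BITS   15
#define MAP_MID_BITS   15
#define MAP_BOT_BITS   14

#define MAP_BOT_SHIFT  ARENA_BITS
#define MAP_MID_SHIFT  (MAP_BOT_BITS + MAP_BOT_SHIFT)
#define MAP_TOP_SHIFT  (MAP_MID_BITS + MAP_MID_SHIFT)

#define MASK(bits)     ((1u << (bits)) - 1)

int main(void) {
    uintptr_t p = (uintptr_t)0x00007f3a2b4c5d6eULL;  /* arbitrary user-space address */
    unsigned top = (p >> MAP_TOP_SHIFT) & MASK(MAP_TOP_BITS);
    unsigned mid = (p >> MAP_MID_SHIFT) & MASK(MAP_MID_BITS);
    unsigned bot = (p >> MAP_BOT_SHIFT) & MASK(MAP_BOT_BITS);
    /* top/mid/bot select the interior and leaf nodes; the low ARENA_BITS
     * are the offset inside the (ideally aligned) arena. */
    printf("top=%u mid=%u bot=%u offset=%lu\n",
           top, mid, bot, (unsigned long)(p & MASK(ARENA_BITS)));
    return 0;
}

With 15 + 15 + 14 + 20 = 64 bits accounted for, three lookups (top, mid, bot) are enough to reach the leaf that records whether the arena containing p is managed by obmalloc, which is what address_in_range() needs.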
+ */ +#define HIGH_BITS(p) (AS_UINT(p) >> ADDRESS_BITS) +#else +#define HIGH_BITS(p) 0 +#endif + + +/* This is the leaf of the radix tree. See arena_map_mark_used() for the + * meaning of these members. */ +typedef struct { + int32_t tail_hi; + int32_t tail_lo; +} arena_coverage_t; + +typedef struct arena_map_bot { + /* The members tail_hi and tail_lo are accessed together. So, it + * better to have them as an array of structs, rather than two + * arrays. + */ + arena_coverage_t arenas[MAP_BOT_LENGTH]; +} arena_map_bot_t; + +#ifdef USE_INTERIOR_NODES +typedef struct arena_map_mid { + struct arena_map_bot *ptrs[MAP_MID_LENGTH]; +} arena_map_mid_t; + +typedef struct arena_map_top { + struct arena_map_mid *ptrs[MAP_TOP_LENGTH]; +} arena_map_top_t; +#endif + +struct _obmalloc_usage { + /* The root of radix tree. Note that by initializing like this, the memory + * should be in the BSS. The OS will only memory map pages as the MAP_MID + * nodes get used (OS pages are demand loaded as needed). + */ +#ifdef USE_INTERIOR_NODES + arena_map_top_t arena_map_root; + /* accounting for number of used interior nodes */ + int arena_map_mid_count; + int arena_map_bot_count; +#else + arena_map_bot_t arena_map_root; +#endif +}; + +#endif /* WITH_PYMALLOC_RADIX_TREE */ + + +struct _obmalloc_state { + int dump_debug_stats; + struct _obmalloc_pools pools; + struct _obmalloc_mgmt mgmt; + struct _obmalloc_usage usage; +}; + + +#undef uint + + +/* Allocate memory directly from the O/S virtual memory system, + * where supported. Otherwise fallback on malloc */ +void *_PyObject_VirtualAlloc(size_t size); +void _PyObject_VirtualFree(void *, size_t size); + + +/* This function returns the number of allocated memory blocks, regardless of size */ +PyAPI_FUNC(Py_ssize_t) _Py_GetAllocatedBlocks(void); + + +#ifdef WITH_PYMALLOC +// Export the symbol for the 3rd party guppy3 project +PyAPI_FUNC(int) _PyObject_DebugMallocStats(FILE *out); +#endif + + +#ifdef __cplusplus +} +#endif +#endif // !Py_INTERNAL_OBMALLOC_H diff --git a/Include/internal/pycore_obmalloc_init.h b/Include/internal/pycore_obmalloc_init.h new file mode 100644 index 00000000000000..c9f197e72de9f5 --- /dev/null +++ b/Include/internal/pycore_obmalloc_init.h @@ -0,0 +1,69 @@ +#ifndef Py_INTERNAL_OBMALLOC_INIT_H +#define Py_INTERNAL_OBMALLOC_INIT_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + + +/****************************************************/ +/* the default object allocator's state initializer */ + +#define PTA(pools, x) \ + ((poolp )((uint8_t *)&(pools.used[2*(x)]) - 2*sizeof(pymem_block *))) +#define PT(p, x) PTA(p, x), PTA(p, x) + +#define PT_8(p, start) \ + PT(p, start), \ + PT(p, start+1), \ + PT(p, start+2), \ + PT(p, start+3), \ + PT(p, start+4), \ + PT(p, start+5), \ + PT(p, start+6), \ + PT(p, start+7) + +#if NB_SMALL_SIZE_CLASSES <= 8 +# define _obmalloc_pools_INIT(p) \ + { PT_8(p, 0) } +#elif NB_SMALL_SIZE_CLASSES <= 16 +# define _obmalloc_pools_INIT(p) \ + { PT_8(p, 0), PT_8(p, 8) } +#elif NB_SMALL_SIZE_CLASSES <= 24 +# define _obmalloc_pools_INIT(p) \ + { PT_8(p, 0), PT_8(p, 8), PT_8(p, 16) } +#elif NB_SMALL_SIZE_CLASSES <= 32 +# define _obmalloc_pools_INIT(p) \ + { PT_8(p, 0), PT_8(p, 8), PT_8(p, 16), PT_8(p, 24) } +#elif NB_SMALL_SIZE_CLASSES <= 40 +# define _obmalloc_pools_INIT(p) \ + { PT_8(p, 0), PT_8(p, 8), PT_8(p, 16), PT_8(p, 24), PT_8(p, 32) } +#elif NB_SMALL_SIZE_CLASSES <= 48 +# define _obmalloc_pools_INIT(p) \ + { PT_8(p, 0), PT_8(p, 8), PT_8(p, 
16), PT_8(p, 24), PT_8(p, 32), PT_8(p, 40) } +#elif NB_SMALL_SIZE_CLASSES <= 56 +# define _obmalloc_pools_INIT(p) \ + { PT_8(p, 0), PT_8(p, 8), PT_8(p, 16), PT_8(p, 24), PT_8(p, 32), PT_8(p, 40), PT_8(p, 48) } +#elif NB_SMALL_SIZE_CLASSES <= 64 +# define _obmalloc_pools_INIT(p) \ + { PT_8(p, 0), PT_8(p, 8), PT_8(p, 16), PT_8(p, 24), PT_8(p, 32), PT_8(p, 40), PT_8(p, 48), PT_8(p, 56) } +#else +# error "NB_SMALL_SIZE_CLASSES should be less than 64" +#endif + +#define _obmalloc_state_INIT(obmalloc) \ + { \ + .dump_debug_stats = -1, \ + .pools = { \ + .used = _obmalloc_pools_INIT(obmalloc.pools), \ + }, \ + } + + +#ifdef __cplusplus +} +#endif +#endif // !Py_INTERNAL_OBMALLOC_INIT_H diff --git a/Include/internal/pycore_opcode.h b/Include/internal/pycore_opcode.h index 33617b4be444f4..6e5d45343f3c23 100644 --- a/Include/internal/pycore_opcode.h +++ b/Include/internal/pycore_opcode.h @@ -58,7 +58,6 @@ const uint8_t _PyOpcode_Deopt[256] = { [BEFORE_ASYNC_WITH] = BEFORE_ASYNC_WITH, [BEFORE_WITH] = BEFORE_WITH, [BINARY_OP] = BINARY_OP, - [BINARY_OP_ADAPTIVE] = BINARY_OP, [BINARY_OP_ADD_FLOAT] = BINARY_OP, [BINARY_OP_ADD_INT] = BINARY_OP, [BINARY_OP_ADD_UNICODE] = BINARY_OP, @@ -69,7 +68,6 @@ const uint8_t _PyOpcode_Deopt[256] = { [BINARY_OP_SUBTRACT_INT] = BINARY_OP, [BINARY_SLICE] = BINARY_SLICE, [BINARY_SUBSCR] = BINARY_SUBSCR, - [BINARY_SUBSCR_ADAPTIVE] = BINARY_SUBSCR, [BINARY_SUBSCR_DICT] = BINARY_SUBSCR, [BINARY_SUBSCR_GETITEM] = BINARY_SUBSCR, [BINARY_SUBSCR_LIST_INT] = BINARY_SUBSCR, @@ -83,7 +81,6 @@ const uint8_t _PyOpcode_Deopt[256] = { [BUILD_TUPLE] = BUILD_TUPLE, [CACHE] = CACHE, [CALL] = CALL, - [CALL_ADAPTIVE] = CALL, [CALL_BOUND_METHOD_EXACT_ARGS] = CALL, [CALL_BUILTIN_CLASS] = CALL, [CALL_BUILTIN_FAST_WITH_KEYWORDS] = CALL, @@ -106,7 +103,6 @@ const uint8_t _PyOpcode_Deopt[256] = { [CHECK_EXC_MATCH] = CHECK_EXC_MATCH, [CLEANUP_THROW] = CLEANUP_THROW, [COMPARE_OP] = COMPARE_OP, - [COMPARE_OP_ADAPTIVE] = COMPARE_OP, [COMPARE_OP_FLOAT_JUMP] = COMPARE_OP, [COMPARE_OP_INT_JUMP] = COMPARE_OP, [COMPARE_OP_STR_JUMP] = COMPARE_OP, @@ -124,12 +120,12 @@ const uint8_t _PyOpcode_Deopt[256] = { [END_ASYNC_FOR] = END_ASYNC_FOR, [END_FOR] = END_FOR, [EXTENDED_ARG] = EXTENDED_ARG, - [EXTENDED_ARG_QUICK] = EXTENDED_ARG, [FORMAT_VALUE] = FORMAT_VALUE, [FOR_ITER] = FOR_ITER, - [FOR_ITER_ADAPTIVE] = FOR_ITER, + [FOR_ITER_GEN] = FOR_ITER, [FOR_ITER_LIST] = FOR_ITER, [FOR_ITER_RANGE] = FOR_ITER, + [FOR_ITER_TUPLE] = FOR_ITER, [GET_AITER] = GET_AITER, [GET_ANEXT] = GET_ANEXT, [GET_AWAITABLE] = GET_AWAITABLE, @@ -139,10 +135,10 @@ const uint8_t _PyOpcode_Deopt[256] = { [IMPORT_FROM] = IMPORT_FROM, [IMPORT_NAME] = IMPORT_NAME, [IMPORT_STAR] = IMPORT_STAR, + [INTERPRETER_EXIT] = INTERPRETER_EXIT, [IS_OP] = IS_OP, [JUMP_BACKWARD] = JUMP_BACKWARD, [JUMP_BACKWARD_NO_INTERRUPT] = JUMP_BACKWARD_NO_INTERRUPT, - [JUMP_BACKWARD_QUICK] = JUMP_BACKWARD, [JUMP_FORWARD] = JUMP_FORWARD, [JUMP_IF_FALSE_OR_POP] = JUMP_IF_FALSE_OR_POP, [JUMP_IF_TRUE_OR_POP] = JUMP_IF_TRUE_OR_POP, @@ -152,7 +148,6 @@ const uint8_t _PyOpcode_Deopt[256] = { [LIST_TO_TUPLE] = LIST_TO_TUPLE, [LOAD_ASSERTION_ERROR] = LOAD_ASSERTION_ERROR, [LOAD_ATTR] = LOAD_ATTR, - [LOAD_ATTR_ADAPTIVE] = LOAD_ATTR, [LOAD_ATTR_CLASS] = LOAD_ATTR, [LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = LOAD_ATTR, [LOAD_ATTR_INSTANCE_VALUE] = LOAD_ATTR, @@ -172,10 +167,10 @@ const uint8_t _PyOpcode_Deopt[256] = { [LOAD_DEREF] = LOAD_DEREF, [LOAD_FAST] = LOAD_FAST, [LOAD_FAST_CHECK] = LOAD_FAST_CHECK, + [LOAD_FAST_R] = LOAD_FAST_R, [LOAD_FAST__LOAD_CONST] = LOAD_FAST, 
[LOAD_FAST__LOAD_FAST] = LOAD_FAST, [LOAD_GLOBAL] = LOAD_GLOBAL, - [LOAD_GLOBAL_ADAPTIVE] = LOAD_GLOBAL, [LOAD_GLOBAL_BUILTIN] = LOAD_GLOBAL, [LOAD_GLOBAL_MODULE] = LOAD_GLOBAL, [LOAD_NAME] = LOAD_NAME, @@ -200,37 +195,39 @@ const uint8_t _PyOpcode_Deopt[256] = { [RAISE_VARARGS] = RAISE_VARARGS, [RERAISE] = RERAISE, [RESUME] = RESUME, - [RESUME_QUICK] = RESUME, [RETURN_GENERATOR] = RETURN_GENERATOR, [RETURN_VALUE] = RETURN_VALUE, [SEND] = SEND, [SETUP_ANNOTATIONS] = SETUP_ANNOTATIONS, [SET_ADD] = SET_ADD, [SET_UPDATE] = SET_UPDATE, + [STOPITERATION_ERROR] = STOPITERATION_ERROR, [STORE_ATTR] = STORE_ATTR, - [STORE_ATTR_ADAPTIVE] = STORE_ATTR, [STORE_ATTR_INSTANCE_VALUE] = STORE_ATTR, [STORE_ATTR_SLOT] = STORE_ATTR, [STORE_ATTR_WITH_HINT] = STORE_ATTR, [STORE_DEREF] = STORE_DEREF, [STORE_FAST] = STORE_FAST, + [STORE_FAST_R] = STORE_FAST_R, [STORE_FAST__LOAD_FAST] = STORE_FAST, [STORE_FAST__STORE_FAST] = STORE_FAST, [STORE_GLOBAL] = STORE_GLOBAL, [STORE_NAME] = STORE_NAME, [STORE_SLICE] = STORE_SLICE, [STORE_SUBSCR] = STORE_SUBSCR, - [STORE_SUBSCR_ADAPTIVE] = STORE_SUBSCR, [STORE_SUBSCR_DICT] = STORE_SUBSCR, [STORE_SUBSCR_LIST_INT] = STORE_SUBSCR, [SWAP] = SWAP, [UNARY_INVERT] = UNARY_INVERT, + [UNARY_INVERT_R] = UNARY_INVERT_R, [UNARY_NEGATIVE] = UNARY_NEGATIVE, + [UNARY_NEGATIVE_R] = UNARY_NEGATIVE_R, [UNARY_NOT] = UNARY_NOT, + [UNARY_NOT_R] = UNARY_NOT_R, [UNARY_POSITIVE] = UNARY_POSITIVE, + [UNARY_POSITIVE_R] = UNARY_POSITIVE_R, [UNPACK_EX] = UNPACK_EX, [UNPACK_SEQUENCE] = UNPACK_SEQUENCE, - [UNPACK_SEQUENCE_ADAPTIVE] = UNPACK_SEQUENCE, [UNPACK_SEQUENCE_LIST] = UNPACK_SEQUENCE, [UNPACK_SEQUENCE_TUPLE] = UNPACK_SEQUENCE, [UNPACK_SEQUENCE_TWO_TUPLE] = UNPACK_SEQUENCE, @@ -244,7 +241,7 @@ static const char *const _PyOpcode_OpName[263] = { [CACHE] = "CACHE", [POP_TOP] = "POP_TOP", [PUSH_NULL] = "PUSH_NULL", - [BINARY_OP_ADAPTIVE] = "BINARY_OP_ADAPTIVE", + [INTERPRETER_EXIT] = "INTERPRETER_EXIT", [END_FOR] = "END_FOR", [BINARY_OP_ADD_FLOAT] = "BINARY_OP_ADD_FLOAT", [BINARY_OP_ADD_INT] = "BINARY_OP_ADD_INT", @@ -257,28 +254,30 @@ static const char *const _PyOpcode_OpName[263] = { [BINARY_OP_MULTIPLY_FLOAT] = "BINARY_OP_MULTIPLY_FLOAT", [BINARY_OP_MULTIPLY_INT] = "BINARY_OP_MULTIPLY_INT", [UNARY_INVERT] = "UNARY_INVERT", + [UNARY_POSITIVE_R] = "UNARY_POSITIVE_R", + [UNARY_NEGATIVE_R] = "UNARY_NEGATIVE_R", + [UNARY_NOT_R] = "UNARY_NOT_R", + [UNARY_INVERT_R] = "UNARY_INVERT_R", [BINARY_OP_SUBTRACT_FLOAT] = "BINARY_OP_SUBTRACT_FLOAT", [BINARY_OP_SUBTRACT_INT] = "BINARY_OP_SUBTRACT_INT", - [BINARY_SUBSCR_ADAPTIVE] = "BINARY_SUBSCR_ADAPTIVE", [BINARY_SUBSCR_DICT] = "BINARY_SUBSCR_DICT", [BINARY_SUBSCR_GETITEM] = "BINARY_SUBSCR_GETITEM", [BINARY_SUBSCR_LIST_INT] = "BINARY_SUBSCR_LIST_INT", - [BINARY_SUBSCR_TUPLE_INT] = "BINARY_SUBSCR_TUPLE_INT", - [CALL_ADAPTIVE] = "CALL_ADAPTIVE", - [CALL_PY_EXACT_ARGS] = "CALL_PY_EXACT_ARGS", [BINARY_SUBSCR] = "BINARY_SUBSCR", [BINARY_SLICE] = "BINARY_SLICE", [STORE_SLICE] = "STORE_SLICE", - [CALL_PY_WITH_DEFAULTS] = "CALL_PY_WITH_DEFAULTS", - [CALL_BOUND_METHOD_EXACT_ARGS] = "CALL_BOUND_METHOD_EXACT_ARGS", + [BINARY_SUBSCR_TUPLE_INT] = "BINARY_SUBSCR_TUPLE_INT", + [CALL_PY_EXACT_ARGS] = "CALL_PY_EXACT_ARGS", [GET_LEN] = "GET_LEN", [MATCH_MAPPING] = "MATCH_MAPPING", [MATCH_SEQUENCE] = "MATCH_SEQUENCE", [MATCH_KEYS] = "MATCH_KEYS", - [CALL_BUILTIN_CLASS] = "CALL_BUILTIN_CLASS", + [CALL_PY_WITH_DEFAULTS] = "CALL_PY_WITH_DEFAULTS", [PUSH_EXC_INFO] = "PUSH_EXC_INFO", [CHECK_EXC_MATCH] = "CHECK_EXC_MATCH", [CHECK_EG_MATCH] = "CHECK_EG_MATCH", + 
[CALL_BOUND_METHOD_EXACT_ARGS] = "CALL_BOUND_METHOD_EXACT_ARGS", + [CALL_BUILTIN_CLASS] = "CALL_BUILTIN_CLASS", [CALL_BUILTIN_FAST_WITH_KEYWORDS] = "CALL_BUILTIN_FAST_WITH_KEYWORDS", [CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = "CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS", [CALL_NO_KW_BUILTIN_FAST] = "CALL_NO_KW_BUILTIN_FAST", @@ -288,8 +287,6 @@ static const char *const _PyOpcode_OpName[263] = { [CALL_NO_KW_LIST_APPEND] = "CALL_NO_KW_LIST_APPEND", [CALL_NO_KW_METHOD_DESCRIPTOR_FAST] = "CALL_NO_KW_METHOD_DESCRIPTOR_FAST", [CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS] = "CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS", - [CALL_NO_KW_METHOD_DESCRIPTOR_O] = "CALL_NO_KW_METHOD_DESCRIPTOR_O", - [CALL_NO_KW_STR_1] = "CALL_NO_KW_STR_1", [WITH_EXCEPT_START] = "WITH_EXCEPT_START", [GET_AITER] = "GET_AITER", [GET_ANEXT] = "GET_ANEXT", @@ -297,24 +294,24 @@ static const char *const _PyOpcode_OpName[263] = { [BEFORE_WITH] = "BEFORE_WITH", [END_ASYNC_FOR] = "END_ASYNC_FOR", [CLEANUP_THROW] = "CLEANUP_THROW", + [CALL_NO_KW_METHOD_DESCRIPTOR_O] = "CALL_NO_KW_METHOD_DESCRIPTOR_O", + [CALL_NO_KW_STR_1] = "CALL_NO_KW_STR_1", [CALL_NO_KW_TUPLE_1] = "CALL_NO_KW_TUPLE_1", [CALL_NO_KW_TYPE_1] = "CALL_NO_KW_TYPE_1", - [COMPARE_OP_ADAPTIVE] = "COMPARE_OP_ADAPTIVE", - [COMPARE_OP_FLOAT_JUMP] = "COMPARE_OP_FLOAT_JUMP", [STORE_SUBSCR] = "STORE_SUBSCR", [DELETE_SUBSCR] = "DELETE_SUBSCR", + [COMPARE_OP_FLOAT_JUMP] = "COMPARE_OP_FLOAT_JUMP", + [STOPITERATION_ERROR] = "STOPITERATION_ERROR", [COMPARE_OP_INT_JUMP] = "COMPARE_OP_INT_JUMP", [COMPARE_OP_STR_JUMP] = "COMPARE_OP_STR_JUMP", - [EXTENDED_ARG_QUICK] = "EXTENDED_ARG_QUICK", - [FOR_ITER_ADAPTIVE] = "FOR_ITER_ADAPTIVE", [FOR_ITER_LIST] = "FOR_ITER_LIST", - [FOR_ITER_RANGE] = "FOR_ITER_RANGE", + [FOR_ITER_TUPLE] = "FOR_ITER_TUPLE", [GET_ITER] = "GET_ITER", [GET_YIELD_FROM_ITER] = "GET_YIELD_FROM_ITER", [PRINT_EXPR] = "PRINT_EXPR", [LOAD_BUILD_CLASS] = "LOAD_BUILD_CLASS", - [JUMP_BACKWARD_QUICK] = "JUMP_BACKWARD_QUICK", - [LOAD_ATTR_ADAPTIVE] = "LOAD_ATTR_ADAPTIVE", + [FOR_ITER_RANGE] = "FOR_ITER_RANGE", + [FOR_ITER_GEN] = "FOR_ITER_GEN", [LOAD_ASSERTION_ERROR] = "LOAD_ASSERTION_ERROR", [RETURN_GENERATOR] = "RETURN_GENERATOR", [LOAD_ATTR_CLASS] = "LOAD_ATTR_CLASS", @@ -394,35 +391,35 @@ static const char *const _PyOpcode_OpName[263] = { [YIELD_VALUE] = "YIELD_VALUE", [RESUME] = "RESUME", [MATCH_CLASS] = "MATCH_CLASS", - [LOAD_CONST__LOAD_FAST] = "LOAD_CONST__LOAD_FAST", - [LOAD_FAST__LOAD_CONST] = "LOAD_FAST__LOAD_CONST", + [LOAD_FAST_R] = "LOAD_FAST_R", + [STORE_FAST_R] = "STORE_FAST_R", [FORMAT_VALUE] = "FORMAT_VALUE", [BUILD_CONST_KEY_MAP] = "BUILD_CONST_KEY_MAP", [BUILD_STRING] = "BUILD_STRING", + [LOAD_CONST__LOAD_FAST] = "LOAD_CONST__LOAD_FAST", + [LOAD_FAST__LOAD_CONST] = "LOAD_FAST__LOAD_CONST", [LOAD_FAST__LOAD_FAST] = "LOAD_FAST__LOAD_FAST", - [LOAD_GLOBAL_ADAPTIVE] = "LOAD_GLOBAL_ADAPTIVE", [LOAD_GLOBAL_BUILTIN] = "LOAD_GLOBAL_BUILTIN", - [LOAD_GLOBAL_MODULE] = "LOAD_GLOBAL_MODULE", [LIST_EXTEND] = "LIST_EXTEND", [SET_UPDATE] = "SET_UPDATE", [DICT_MERGE] = "DICT_MERGE", [DICT_UPDATE] = "DICT_UPDATE", - [RESUME_QUICK] = "RESUME_QUICK", - [STORE_ATTR_ADAPTIVE] = "STORE_ATTR_ADAPTIVE", + [LOAD_GLOBAL_MODULE] = "LOAD_GLOBAL_MODULE", [STORE_ATTR_INSTANCE_VALUE] = "STORE_ATTR_INSTANCE_VALUE", [STORE_ATTR_SLOT] = "STORE_ATTR_SLOT", [STORE_ATTR_WITH_HINT] = "STORE_ATTR_WITH_HINT", + [STORE_FAST__LOAD_FAST] = "STORE_FAST__LOAD_FAST", [CALL] = "CALL", [KW_NAMES] = "KW_NAMES", - [STORE_FAST__LOAD_FAST] = "STORE_FAST__LOAD_FAST", [STORE_FAST__STORE_FAST] = "STORE_FAST__STORE_FAST", - 
[STORE_SUBSCR_ADAPTIVE] = "STORE_SUBSCR_ADAPTIVE", [STORE_SUBSCR_DICT] = "STORE_SUBSCR_DICT", [STORE_SUBSCR_LIST_INT] = "STORE_SUBSCR_LIST_INT", - [UNPACK_SEQUENCE_ADAPTIVE] = "UNPACK_SEQUENCE_ADAPTIVE", [UNPACK_SEQUENCE_LIST] = "UNPACK_SEQUENCE_LIST", [UNPACK_SEQUENCE_TUPLE] = "UNPACK_SEQUENCE_TUPLE", [UNPACK_SEQUENCE_TWO_TUPLE] = "UNPACK_SEQUENCE_TWO_TUPLE", + [179] = "<179>", + [180] = "<180>", + [181] = "<181>", [182] = "<182>", [183] = "<183>", [184] = "<184>", @@ -508,6 +505,9 @@ static const char *const _PyOpcode_OpName[263] = { #endif #define EXTRA_CASES \ + case 179: \ + case 180: \ + case 181: \ case 182: \ case 183: \ case 184: \ diff --git a/Include/internal/pycore_parser.h b/Include/internal/pycore_parser.h index e2de24e2ca9734..2d2b56bd824cb4 100644 --- a/Include/internal/pycore_parser.h +++ b/Include/internal/pycore_parser.h @@ -8,12 +8,31 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif + +#include "pycore_pyarena.h" // PyArena + + +#ifdef Py_DEBUG +#define _PYPEGEN_NSTATISTICS 2000 +#endif + +struct _parser_runtime_state { +#ifdef Py_DEBUG + long memo_statistics[_PYPEGEN_NSTATISTICS]; +#else + int _not_used; +#endif +}; + + + extern struct _mod* _PyParser_ASTFromString( const char *str, PyObject* filename, int mode, PyCompilerFlags *flags, PyArena *arena); + extern struct _mod* _PyParser_ASTFromFile( FILE *fp, PyObject *filename_ob, @@ -25,6 +44,7 @@ extern struct _mod* _PyParser_ASTFromFile( int *errcode, PyArena *arena); + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_pyhash.h b/Include/internal/pycore_pyhash.h index a229f8d8b7f0a2..34dfa53771288e 100644 --- a/Include/internal/pycore_pyhash.h +++ b/Include/internal/pycore_pyhash.h @@ -5,6 +5,36 @@ # error "this header requires Py_BUILD_CORE define" #endif -uint64_t _Py_KeyedHash(uint64_t, const char *, Py_ssize_t); +struct pyhash_runtime_state { + struct { +#ifndef MS_WINDOWS + int fd; + dev_t st_dev; + ino_t st_ino; +#else + // This is a placeholder so the struct isn't empty on Windows. + int _not_used; +#endif + } urandom_cache; +}; + +#ifndef MS_WINDOWS +# define _py_urandom_cache_INIT \ + { \ + .fd = -1, \ + } +#else +# define _py_urandom_cache_INIT {0} #endif + +#define pyhash_state_INIT \ + { \ + .urandom_cache = _py_urandom_cache_INIT, \ + } + + +uint64_t _Py_KeyedHash(uint64_t, const char *, Py_ssize_t); + + +#endif // Py_INTERNAL_HASH_H diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h index b4718b8ade2354..370e4cbd59f976 100644 --- a/Include/internal/pycore_pylifecycle.h +++ b/Include/internal/pycore_pylifecycle.h @@ -14,10 +14,6 @@ extern "C" { struct _PyArgv; struct pyruntimestate; -/* True if the main interpreter thread exited due to an unhandled - * KeyboardInterrupt exception, suggesting the user pressed ^C. 
*/ -PyAPI_DATA(int) _Py_UnhandledKeyboardInterrupt; - extern int _Py_SetFileSystemEncoding( const char *encoding, const char *errors); @@ -33,6 +29,8 @@ PyAPI_FUNC(int) _Py_IsLocaleCoercionTarget(const char *ctype_loc); /* Various one-time initializers */ +extern void _Py_InitVersion(void); +extern PyStatus _PyImport_Init(void); extern PyStatus _PyFaulthandler_Init(int enable); extern int _PyTraceMalloc_Init(int enable); extern PyObject * _PyBuiltin_Init(PyInterpreterState *interp); @@ -46,6 +44,7 @@ extern void _PySys_Fini(PyInterpreterState *interp); extern int _PyBuiltins_AddExceptions(PyObject * bltinmod); extern PyStatus _Py_HashRandomization_Init(const PyConfig *); +extern PyStatus _PyTime_Init(void); extern PyStatus _PyImportZip_Init(PyThreadState *tstate); extern PyStatus _PyGC_Init(PyInterpreterState *interp); extern PyStatus _PyAtExit_Init(PyInterpreterState *interp); diff --git a/Include/internal/pycore_pymem.h b/Include/internal/pycore_pymem.h index b9eea9d4b30ad1..4cc953d8d779c9 100644 --- a/Include/internal/pycore_pymem.h +++ b/Include/internal/pycore_pymem.h @@ -11,6 +11,27 @@ extern "C" { #include "pymem.h" // PyMemAllocatorName +typedef struct { + /* We tag each block with an API ID in order to tag API violations */ + char api_id; + PyMemAllocatorEx alloc; +} debug_alloc_api_t; + +struct _pymem_allocators { + struct { + PyMemAllocatorEx raw; + PyMemAllocatorEx mem; + PyMemAllocatorEx obj; + } standard; + struct { + debug_alloc_api_t raw; + debug_alloc_api_t mem; + debug_alloc_api_t obj; + } debug; + PyObjectArenaAllocator obj_arena; +}; + + /* Set the memory allocator of the specified domain to the default. Save the old allocator into *old_alloc if it's non-NULL. Return on success, or return -1 if the domain is unknown. */ @@ -69,44 +90,6 @@ PyAPI_FUNC(int) _PyMem_GetAllocatorName( PYMEM_ALLOCATOR_NOT_SET does nothing. */ PyAPI_FUNC(int) _PyMem_SetupAllocators(PyMemAllocatorName allocator); -struct _PyTraceMalloc_Config { - /* Module initialized? - Variable protected by the GIL */ - enum { - TRACEMALLOC_NOT_INITIALIZED, - TRACEMALLOC_INITIALIZED, - TRACEMALLOC_FINALIZED - } initialized; - - /* Is tracemalloc tracing memory allocations? - Variable protected by the GIL */ - int tracing; - - /* limit of the number of frames in a traceback, 1 by default. - Variable protected by the GIL. */ - int max_nframe; -}; - -#define _PyTraceMalloc_Config_INIT \ - {.initialized = TRACEMALLOC_NOT_INITIALIZED, \ - .tracing = 0, \ - .max_nframe = 1} - -PyAPI_DATA(struct _PyTraceMalloc_Config) _Py_tracemalloc_config; - -/* Allocate memory directly from the O/S virtual memory system, - * where supported. 
Otherwise fallback on malloc */ -void *_PyObject_VirtualAlloc(size_t size); -void _PyObject_VirtualFree(void *, size_t size); - -/* This function returns the number of allocated memory blocks, regardless of size */ -PyAPI_FUNC(Py_ssize_t) _Py_GetAllocatedBlocks(void); - -/* Macros */ -#ifdef WITH_PYMALLOC -// Export the symbol for the 3rd party guppy3 project -PyAPI_FUNC(int) _PyObject_DebugMallocStats(FILE *out); -#endif #ifdef __cplusplus } diff --git a/Include/internal/pycore_pymem_init.h b/Include/internal/pycore_pymem_init.h new file mode 100644 index 00000000000000..78232738cb09d5 --- /dev/null +++ b/Include/internal/pycore_pymem_init.h @@ -0,0 +1,85 @@ +#ifndef Py_INTERNAL_PYMEM_INIT_H +#define Py_INTERNAL_PYMEM_INIT_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + +#include "pycore_pymem.h" + + +/********************************/ +/* the allocators' initializers */ + +extern void * _PyMem_RawMalloc(void *, size_t); +extern void * _PyMem_RawCalloc(void *, size_t, size_t); +extern void * _PyMem_RawRealloc(void *, void *, size_t); +extern void _PyMem_RawFree(void *, void *); +#define PYRAW_ALLOC {NULL, _PyMem_RawMalloc, _PyMem_RawCalloc, _PyMem_RawRealloc, _PyMem_RawFree} + +#ifdef WITH_PYMALLOC +extern void* _PyObject_Malloc(void *, size_t); +extern void* _PyObject_Calloc(void *, size_t, size_t); +extern void _PyObject_Free(void *, void *); +extern void* _PyObject_Realloc(void *, void *, size_t); +# define PYOBJ_ALLOC {NULL, _PyObject_Malloc, _PyObject_Calloc, _PyObject_Realloc, _PyObject_Free} +#else +# define PYOBJ_ALLOC PYRAW_ALLOC +#endif // WITH_PYMALLOC + +#define PYMEM_ALLOC PYOBJ_ALLOC + +extern void* _PyMem_DebugRawMalloc(void *, size_t); +extern void* _PyMem_DebugRawCalloc(void *, size_t, size_t); +extern void* _PyMem_DebugRawRealloc(void *, void *, size_t); +extern void _PyMem_DebugRawFree(void *, void *); + +extern void* _PyMem_DebugMalloc(void *, size_t); +extern void* _PyMem_DebugCalloc(void *, size_t, size_t); +extern void* _PyMem_DebugRealloc(void *, void *, size_t); +extern void _PyMem_DebugFree(void *, void *); + +#define PYDBGRAW_ALLOC(runtime) \ + {&(runtime).allocators.debug.raw, _PyMem_DebugRawMalloc, _PyMem_DebugRawCalloc, _PyMem_DebugRawRealloc, _PyMem_DebugRawFree} +#define PYDBGMEM_ALLOC(runtime) \ + {&(runtime).allocators.debug.mem, _PyMem_DebugMalloc, _PyMem_DebugCalloc, _PyMem_DebugRealloc, _PyMem_DebugFree} +#define PYDBGOBJ_ALLOC(runtime) \ + {&(runtime).allocators.debug.obj, _PyMem_DebugMalloc, _PyMem_DebugCalloc, _PyMem_DebugRealloc, _PyMem_DebugFree} + +extern void * _PyMem_ArenaAlloc(void *, size_t); +extern void _PyMem_ArenaFree(void *, void *, size_t); + +#ifdef Py_DEBUG +# define _pymem_allocators_standard_INIT(runtime) \ + { \ + PYDBGRAW_ALLOC(runtime), \ + PYDBGMEM_ALLOC(runtime), \ + PYDBGOBJ_ALLOC(runtime), \ + } +#else +# define _pymem_allocators_standard_INIT(runtime) \ + { \ + PYRAW_ALLOC, \ + PYMEM_ALLOC, \ + PYOBJ_ALLOC, \ + } +#endif + +#define _pymem_allocators_debug_INIT \ + { \ + {'r', PYRAW_ALLOC}, \ + {'m', PYMEM_ALLOC}, \ + {'o', PYOBJ_ALLOC}, \ + } + +# define _pymem_allocators_obj_arena_INIT \ + { NULL, _PyMem_ArenaAlloc, _PyMem_ArenaFree } + + +#ifdef __cplusplus +} +#endif +#endif // !Py_INTERNAL_PYMEM_INIT_H diff --git a/Include/internal/pycore_pythread.h b/Include/internal/pycore_pythread.h new file mode 100644 index 00000000000000..f53921494c158f --- /dev/null +++ b/Include/internal/pycore_pythread.h @@ -0,0 +1,81 @@ +#ifndef 
Py_INTERNAL_PYTHREAD_H +#define Py_INTERNAL_PYTHREAD_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + + +#ifndef _POSIX_THREADS +/* This means pthreads are not implemented in libc headers, hence the macro + not present in unistd.h. But they still can be implemented as an external + library (e.g. gnu pth in pthread emulation) */ +# ifdef HAVE_PTHREAD_H +# include <pthread.h> /* _POSIX_THREADS */ +# endif +# ifndef _POSIX_THREADS +/* Check if we're running on HP-UX and _SC_THREADS is defined. If so, then + enough of the Posix threads package is implemented to support python + threads. + + This is valid for HP-UX 11.23 running on an ia64 system. If needed, add + a check of __ia64 to verify that we're running on an ia64 system instead + of a pa-risc system. +*/ +# ifdef __hpux +# ifdef _SC_THREADS +# define _POSIX_THREADS +# endif +# endif +# endif /* _POSIX_THREADS */ +#endif /* _POSIX_THREADS */ + +#if defined(_POSIX_THREADS) || defined(HAVE_PTHREAD_STUBS) +# define _USE_PTHREADS +#endif + +#if defined(_USE_PTHREADS) && defined(HAVE_PTHREAD_CONDATTR_SETCLOCK) && defined(HAVE_CLOCK_GETTIME) && defined(CLOCK_MONOTONIC) +// monotonic is supported statically. It doesn't mean it works on runtime. +# define CONDATTR_MONOTONIC +#endif + + +#if defined(HAVE_PTHREAD_STUBS) +// pthread_key +struct py_stub_tls_entry { + bool in_use; + void *value; +}; +#endif + +struct _pythread_runtime_state { + int initialized; + +#ifdef _USE_PTHREADS + // This matches when thread_pthread.h is used. + struct { + /* NULL when pthread_condattr_setclock(CLOCK_MONOTONIC) is not supported. */ + pthread_condattr_t *ptr; +# ifdef CONDATTR_MONOTONIC + /* The value to which condattr_monotonic is set. */ + pthread_condattr_t val; +# endif + } _condattr_monotonic; + +#endif // USE_PTHREADS + +#if defined(HAVE_PTHREAD_STUBS) + struct { + struct py_stub_tls_entry tls_entries[PTHREAD_KEYS_MAX]; + } stubs; +#endif +}; + + +#ifdef __cplusplus +} +#endif +#endif /* !Py_INTERNAL_PYTHREAD_H */ diff --git a/Include/internal/pycore_range.h b/Include/internal/pycore_range.h index 809e89a1e01b60..bf045ec4fd8332 100644 --- a/Include/internal/pycore_range.h +++ b/Include/internal/pycore_range.h @@ -10,7 +10,6 @@ extern "C" { typedef struct { PyObject_HEAD - long index; long start; long step; long len; diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index d1fbc09f1ea206..d100e836c7d153 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -9,26 +9,32 @@ extern "C" { #endif #include "pycore_atomic.h" /* _Py_atomic_address */ -#include "pycore_gil.h" // struct _gil_runtime_state +#include "pycore_ceval_state.h" // struct _ceval_runtime_state +#include "pycore_dict_state.h" // struct _Py_dict_runtime_state +#include "pycore_dtoa.h" // struct _dtoa_runtime_state +#include "pycore_floatobject.h" // struct _Py_float_runtime_state +#include "pycore_faulthandler.h" // struct _faulthandler_runtime_state +#include "pycore_function.h" // struct _func_runtime_state #include "pycore_global_objects.h" // struct _Py_global_objects +#include "pycore_import.h" // struct _import_runtime_state #include "pycore_interp.h" // PyInterpreterState +#include "pycore_parser.h" // struct _parser_runtime_state +#include "pycore_pymem.h" // struct _pymem_allocators +#include "pycore_pyhash.h" // struct pyhash_runtime_state +#include "pycore_pythread.h" // struct _pythread_runtime_state +#include "pycore_obmalloc.h" // 
struct obmalloc_state +#include "pycore_signal.h" // struct _signals_runtime_state +#include "pycore_time.h" // struct _time_runtime_state +#include "pycore_tracemalloc.h" // struct _tracemalloc_runtime_state #include "pycore_unicodeobject.h" // struct _Py_unicode_runtime_ids struct _getargs_runtime_state { - PyThread_type_lock mutex; + PyThread_type_lock mutex; + struct _PyArg_Parser *static_parsers; }; /* ceval state */ -struct _ceval_runtime_state { - /* Request for checking signals. It is shared by all interpreters (see - bpo-40513). Any thread of any interpreter can receive a signal, but only - the main thread of the main interpreter can handle signals: see - _Py_ThreadCanHandleSignals(). */ - _Py_atomic_int signals_pending; - struct _gil_runtime_state gil; -}; - /* GIL state */ struct _gilstate_runtime_state { @@ -85,6 +91,13 @@ typedef struct pyruntimestate { to access it, don't access it directly. */ _Py_atomic_address _finalizing; + struct _pymem_allocators allocators; + struct _obmalloc_state obmalloc; + struct pyhash_runtime_state pyhash_state; + struct _time_runtime_state time; + struct _pythread_runtime_state threads; + struct _signals_runtime_state signals; + struct pyinterpreters { PyThread_type_lock mutex; /* The linked list of interpreters, newest first. */ @@ -111,13 +124,22 @@ typedef struct pyruntimestate { unsigned long main_thread; + PyWideStringList orig_argv; + + struct _parser_runtime_state parser; + #define NEXITFUNCS 32 void (*exitfuncs[NEXITFUNCS])(void); int nexitfuncs; + struct _import_runtime_state imports; struct _ceval_runtime_state ceval; struct _gilstate_runtime_state gilstate; struct _getargs_runtime_state getargs; + struct _dtoa_runtime_state dtoa; + struct _fileutils_state fileutils; + struct _faulthandler_runtime_state faulthandler; + struct _tracemalloc_runtime_state tracemalloc; PyPreConfig preconfig; @@ -127,10 +149,21 @@ typedef struct pyruntimestate { void *open_code_userdata; _Py_AuditHookEntry *audit_hook_head; - struct _Py_unicode_runtime_ids unicode_ids; + struct _Py_float_runtime_state float_state; + struct _Py_unicode_runtime_state unicode_state; + struct _Py_dict_runtime_state dict_state; + struct _py_func_runtime_state func_state; + + struct { + /* Used to set PyTypeObject.tp_version_tag */ + // bpo-42745: next_version_tag remains shared by all interpreters + // because of static types. + unsigned int next_version_tag; + } types; /* All the objects that are shared by the runtime's interpreters. */ - struct _Py_global_objects global_objects; + struct _Py_cached_objects cached_objects; + struct _Py_static_objects static_objects; /* The following fields are here to avoid allocation during init. The data is exposed through _PyRuntimeState pointer fields. diff --git a/Include/internal/pycore_runtime_init.h b/Include/internal/pycore_runtime_init.h index 8dd7a3128c6665..6342a28f4df911 100644 --- a/Include/internal/pycore_runtime_init.h +++ b/Include/internal/pycore_runtime_init.h @@ -9,26 +9,89 @@ extern "C" { #endif #include "pycore_object.h" +#include "pycore_pymem_init.h" +#include "pycore_obmalloc_init.h" /* The static initializers defined here should only be used in the runtime init code (in pystate.c and pylifecycle.c). 
*/ -#define _PyRuntimeState_INIT \ +#define _PyRuntimeState_INIT(runtime) \ { \ + .allocators = { \ + _pymem_allocators_standard_INIT(runtime), \ + _pymem_allocators_debug_INIT, \ + _pymem_allocators_obj_arena_INIT, \ + }, \ + .obmalloc = _obmalloc_state_INIT(runtime.obmalloc), \ + .pyhash_state = pyhash_state_INIT, \ + .signals = _signals_RUNTIME_INIT, \ + .interpreters = { \ + /* This prevents interpreters from getting created \ + until _PyInterpreterState_Enable() is called. */ \ + .next_id = -1, \ + }, \ + .imports = { \ + .lock = { \ + .mutex = NULL, \ + .thread = PYTHREAD_INVALID_THREAD_ID, \ + .level = 0, \ + }, \ + .find_and_load = { \ + .header = 1, \ + }, \ + }, \ + .ceval = { \ + .perf = _PyEval_RUNTIME_PERF_INIT, \ + }, \ .gilstate = { \ .check_enabled = 1, \ /* A TSS key must be initialized with Py_tss_NEEDS_INIT \ in accordance with the specification. */ \ .autoTSSkey = Py_tss_NEEDS_INIT, \ }, \ - .interpreters = { \ - /* This prevents interpreters from getting created \ - until _PyInterpreterState_Enable() is called. */ \ - .next_id = -1, \ + .dtoa = _dtoa_runtime_state_INIT(runtime), \ + .fileutils = { \ + .force_ascii = -1, \ + }, \ + .faulthandler = _faulthandler_runtime_state_INIT, \ + .tracemalloc = _tracemalloc_runtime_state_INIT, \ + .float_state = { \ + .float_format = _py_float_format_unknown, \ + .double_format = _py_float_format_unknown, \ + }, \ + .dict_state = { \ + .next_keys_version = 2, \ + }, \ + .func_state = { \ + .next_version = 1, \ + }, \ + .types = { \ + .next_version_tag = 1, \ + }, \ + .static_objects = { \ + .singletons = { \ + .small_ints = _Py_small_ints_INIT, \ + .bytes_empty = _PyBytes_SIMPLE_INIT(0, 0), \ + .bytes_characters = _Py_bytes_characters_INIT, \ + .strings = { \ + .literals = _Py_str_literals_INIT, \ + .identifiers = _Py_str_identifiers_INIT, \ + .ascii = _Py_str_ascii_INIT, \ + .latin1 = _Py_str_latin1_INIT, \ + }, \ + .tuple_empty = { \ + .ob_base = _PyVarObject_IMMORTAL_INIT(&PyTuple_Type, 0) \ + }, \ + .hamt_bitmap_node_empty = { \ + .ob_base = _PyVarObject_IMMORTAL_INIT(&_PyHamt_BitmapNode_Type, 0) \ + }, \ + .context_token_missing = { \ + .ob_base = _PyObject_IMMORTAL_INIT(&_PyContextTokenMissing_Type), \ + }, \ + }, \ }, \ - .global_objects = _Py_global_objects_INIT, \ ._main_interpreter = _PyInterpreterState_INIT, \ } @@ -47,7 +110,6 @@ extern "C" { #define _PyInterpreterState_INIT \ { \ - ._static = 1, \ .id_refcount = -1, \ DLOPENFLAGS_INIT \ .ceval = { \ @@ -62,12 +124,20 @@ extern "C" { { .threshold = 10, }, \ }, \ }, \ + .static_objects = { \ + .singletons = { \ + ._not_used = 1, \ + .hamt_empty = { \ + .ob_base = _PyObject_IMMORTAL_INIT(&_PyHamt_Type), \ + .h_root = (PyHamtNode*)&_Py_SINGLETON(hamt_bitmap_node_empty), \ + }, \ + }, \ + }, \ ._initial_thread = _PyThreadState_INIT, \ } #define _PyThreadState_INIT \ { \ - ._static = 1, \ .py_recursion_limit = Py_DEFAULT_RECURSION_LIMIT, \ .context_ver = 1, \ } @@ -110,9 +180,9 @@ extern "C" { ._data = (LITERAL) \ } #define INIT_STR(NAME, LITERAL) \ - ._ ## NAME = _PyASCIIObject_INIT(LITERAL) + ._py_ ## NAME = _PyASCIIObject_INIT(LITERAL) #define INIT_ID(NAME) \ - ._ ## NAME = _PyASCIIObject_INIT(#NAME) + ._py_ ## NAME = _PyASCIIObject_INIT(#NAME) #define _PyUnicode_LATIN1_INIT(LITERAL, UTF8) \ { \ ._latin1 = { \ diff --git a/Include/internal/pycore_runtime_init_generated.h b/Include/internal/pycore_runtime_init_generated.h index f7823d36ef819c..6d1b8702c77698 100644 --- a/Include/internal/pycore_runtime_init_generated.h +++ b/Include/internal/pycore_runtime_init_generated.h 
@@ -9,8240 +9,1479 @@ extern "C" { #endif /* The following is auto-generated by Tools/build/generate_global_objects.py. */ -#define _Py_global_objects_INIT { \ - .singletons = { \ - .small_ints = { \ - _PyLong_DIGIT_INIT(-5), \ - _PyLong_DIGIT_INIT(-4), \ - _PyLong_DIGIT_INIT(-3), \ - _PyLong_DIGIT_INIT(-2), \ - _PyLong_DIGIT_INIT(-1), \ - _PyLong_DIGIT_INIT(0), \ - _PyLong_DIGIT_INIT(1), \ - _PyLong_DIGIT_INIT(2), \ - _PyLong_DIGIT_INIT(3), \ - _PyLong_DIGIT_INIT(4), \ - _PyLong_DIGIT_INIT(5), \ - _PyLong_DIGIT_INIT(6), \ - _PyLong_DIGIT_INIT(7), \ - _PyLong_DIGIT_INIT(8), \ - _PyLong_DIGIT_INIT(9), \ - _PyLong_DIGIT_INIT(10), \ - _PyLong_DIGIT_INIT(11), \ - _PyLong_DIGIT_INIT(12), \ - _PyLong_DIGIT_INIT(13), \ - _PyLong_DIGIT_INIT(14), \ - _PyLong_DIGIT_INIT(15), \ - _PyLong_DIGIT_INIT(16), \ - _PyLong_DIGIT_INIT(17), \ - _PyLong_DIGIT_INIT(18), \ - _PyLong_DIGIT_INIT(19), \ - _PyLong_DIGIT_INIT(20), \ - _PyLong_DIGIT_INIT(21), \ - _PyLong_DIGIT_INIT(22), \ - _PyLong_DIGIT_INIT(23), \ - _PyLong_DIGIT_INIT(24), \ - _PyLong_DIGIT_INIT(25), \ - _PyLong_DIGIT_INIT(26), \ - _PyLong_DIGIT_INIT(27), \ - _PyLong_DIGIT_INIT(28), \ - _PyLong_DIGIT_INIT(29), \ - _PyLong_DIGIT_INIT(30), \ - _PyLong_DIGIT_INIT(31), \ - _PyLong_DIGIT_INIT(32), \ - _PyLong_DIGIT_INIT(33), \ - _PyLong_DIGIT_INIT(34), \ - _PyLong_DIGIT_INIT(35), \ - _PyLong_DIGIT_INIT(36), \ - _PyLong_DIGIT_INIT(37), \ - _PyLong_DIGIT_INIT(38), \ - _PyLong_DIGIT_INIT(39), \ - _PyLong_DIGIT_INIT(40), \ - _PyLong_DIGIT_INIT(41), \ - _PyLong_DIGIT_INIT(42), \ - _PyLong_DIGIT_INIT(43), \ - _PyLong_DIGIT_INIT(44), \ - _PyLong_DIGIT_INIT(45), \ - _PyLong_DIGIT_INIT(46), \ - _PyLong_DIGIT_INIT(47), \ - _PyLong_DIGIT_INIT(48), \ - _PyLong_DIGIT_INIT(49), \ - _PyLong_DIGIT_INIT(50), \ - _PyLong_DIGIT_INIT(51), \ - _PyLong_DIGIT_INIT(52), \ - _PyLong_DIGIT_INIT(53), \ - _PyLong_DIGIT_INIT(54), \ - _PyLong_DIGIT_INIT(55), \ - _PyLong_DIGIT_INIT(56), \ - _PyLong_DIGIT_INIT(57), \ - _PyLong_DIGIT_INIT(58), \ - _PyLong_DIGIT_INIT(59), \ - _PyLong_DIGIT_INIT(60), \ - _PyLong_DIGIT_INIT(61), \ - _PyLong_DIGIT_INIT(62), \ - _PyLong_DIGIT_INIT(63), \ - _PyLong_DIGIT_INIT(64), \ - _PyLong_DIGIT_INIT(65), \ - _PyLong_DIGIT_INIT(66), \ - _PyLong_DIGIT_INIT(67), \ - _PyLong_DIGIT_INIT(68), \ - _PyLong_DIGIT_INIT(69), \ - _PyLong_DIGIT_INIT(70), \ - _PyLong_DIGIT_INIT(71), \ - _PyLong_DIGIT_INIT(72), \ - _PyLong_DIGIT_INIT(73), \ - _PyLong_DIGIT_INIT(74), \ - _PyLong_DIGIT_INIT(75), \ - _PyLong_DIGIT_INIT(76), \ - _PyLong_DIGIT_INIT(77), \ - _PyLong_DIGIT_INIT(78), \ - _PyLong_DIGIT_INIT(79), \ - _PyLong_DIGIT_INIT(80), \ - _PyLong_DIGIT_INIT(81), \ - _PyLong_DIGIT_INIT(82), \ - _PyLong_DIGIT_INIT(83), \ - _PyLong_DIGIT_INIT(84), \ - _PyLong_DIGIT_INIT(85), \ - _PyLong_DIGIT_INIT(86), \ - _PyLong_DIGIT_INIT(87), \ - _PyLong_DIGIT_INIT(88), \ - _PyLong_DIGIT_INIT(89), \ - _PyLong_DIGIT_INIT(90), \ - _PyLong_DIGIT_INIT(91), \ - _PyLong_DIGIT_INIT(92), \ - _PyLong_DIGIT_INIT(93), \ - _PyLong_DIGIT_INIT(94), \ - _PyLong_DIGIT_INIT(95), \ - _PyLong_DIGIT_INIT(96), \ - _PyLong_DIGIT_INIT(97), \ - _PyLong_DIGIT_INIT(98), \ - _PyLong_DIGIT_INIT(99), \ - _PyLong_DIGIT_INIT(100), \ - _PyLong_DIGIT_INIT(101), \ - _PyLong_DIGIT_INIT(102), \ - _PyLong_DIGIT_INIT(103), \ - _PyLong_DIGIT_INIT(104), \ - _PyLong_DIGIT_INIT(105), \ - _PyLong_DIGIT_INIT(106), \ - _PyLong_DIGIT_INIT(107), \ - _PyLong_DIGIT_INIT(108), \ - _PyLong_DIGIT_INIT(109), \ - _PyLong_DIGIT_INIT(110), \ - _PyLong_DIGIT_INIT(111), \ - _PyLong_DIGIT_INIT(112), \ - _PyLong_DIGIT_INIT(113), \ - 
_PyLong_DIGIT_INIT(114), \ - _PyLong_DIGIT_INIT(115), \ - _PyLong_DIGIT_INIT(116), \ - _PyLong_DIGIT_INIT(117), \ - _PyLong_DIGIT_INIT(118), \ - _PyLong_DIGIT_INIT(119), \ - _PyLong_DIGIT_INIT(120), \ - _PyLong_DIGIT_INIT(121), \ - _PyLong_DIGIT_INIT(122), \ - _PyLong_DIGIT_INIT(123), \ - _PyLong_DIGIT_INIT(124), \ - _PyLong_DIGIT_INIT(125), \ - _PyLong_DIGIT_INIT(126), \ - _PyLong_DIGIT_INIT(127), \ - _PyLong_DIGIT_INIT(128), \ - _PyLong_DIGIT_INIT(129), \ - _PyLong_DIGIT_INIT(130), \ - _PyLong_DIGIT_INIT(131), \ - _PyLong_DIGIT_INIT(132), \ - _PyLong_DIGIT_INIT(133), \ - _PyLong_DIGIT_INIT(134), \ - _PyLong_DIGIT_INIT(135), \ - _PyLong_DIGIT_INIT(136), \ - _PyLong_DIGIT_INIT(137), \ - _PyLong_DIGIT_INIT(138), \ - _PyLong_DIGIT_INIT(139), \ - _PyLong_DIGIT_INIT(140), \ - _PyLong_DIGIT_INIT(141), \ - _PyLong_DIGIT_INIT(142), \ - _PyLong_DIGIT_INIT(143), \ - _PyLong_DIGIT_INIT(144), \ - _PyLong_DIGIT_INIT(145), \ - _PyLong_DIGIT_INIT(146), \ - _PyLong_DIGIT_INIT(147), \ - _PyLong_DIGIT_INIT(148), \ - _PyLong_DIGIT_INIT(149), \ - _PyLong_DIGIT_INIT(150), \ - _PyLong_DIGIT_INIT(151), \ - _PyLong_DIGIT_INIT(152), \ - _PyLong_DIGIT_INIT(153), \ - _PyLong_DIGIT_INIT(154), \ - _PyLong_DIGIT_INIT(155), \ - _PyLong_DIGIT_INIT(156), \ - _PyLong_DIGIT_INIT(157), \ - _PyLong_DIGIT_INIT(158), \ - _PyLong_DIGIT_INIT(159), \ - _PyLong_DIGIT_INIT(160), \ - _PyLong_DIGIT_INIT(161), \ - _PyLong_DIGIT_INIT(162), \ - _PyLong_DIGIT_INIT(163), \ - _PyLong_DIGIT_INIT(164), \ - _PyLong_DIGIT_INIT(165), \ - _PyLong_DIGIT_INIT(166), \ - _PyLong_DIGIT_INIT(167), \ - _PyLong_DIGIT_INIT(168), \ - _PyLong_DIGIT_INIT(169), \ - _PyLong_DIGIT_INIT(170), \ - _PyLong_DIGIT_INIT(171), \ - _PyLong_DIGIT_INIT(172), \ - _PyLong_DIGIT_INIT(173), \ - _PyLong_DIGIT_INIT(174), \ - _PyLong_DIGIT_INIT(175), \ - _PyLong_DIGIT_INIT(176), \ - _PyLong_DIGIT_INIT(177), \ - _PyLong_DIGIT_INIT(178), \ - _PyLong_DIGIT_INIT(179), \ - _PyLong_DIGIT_INIT(180), \ - _PyLong_DIGIT_INIT(181), \ - _PyLong_DIGIT_INIT(182), \ - _PyLong_DIGIT_INIT(183), \ - _PyLong_DIGIT_INIT(184), \ - _PyLong_DIGIT_INIT(185), \ - _PyLong_DIGIT_INIT(186), \ - _PyLong_DIGIT_INIT(187), \ - _PyLong_DIGIT_INIT(188), \ - _PyLong_DIGIT_INIT(189), \ - _PyLong_DIGIT_INIT(190), \ - _PyLong_DIGIT_INIT(191), \ - _PyLong_DIGIT_INIT(192), \ - _PyLong_DIGIT_INIT(193), \ - _PyLong_DIGIT_INIT(194), \ - _PyLong_DIGIT_INIT(195), \ - _PyLong_DIGIT_INIT(196), \ - _PyLong_DIGIT_INIT(197), \ - _PyLong_DIGIT_INIT(198), \ - _PyLong_DIGIT_INIT(199), \ - _PyLong_DIGIT_INIT(200), \ - _PyLong_DIGIT_INIT(201), \ - _PyLong_DIGIT_INIT(202), \ - _PyLong_DIGIT_INIT(203), \ - _PyLong_DIGIT_INIT(204), \ - _PyLong_DIGIT_INIT(205), \ - _PyLong_DIGIT_INIT(206), \ - _PyLong_DIGIT_INIT(207), \ - _PyLong_DIGIT_INIT(208), \ - _PyLong_DIGIT_INIT(209), \ - _PyLong_DIGIT_INIT(210), \ - _PyLong_DIGIT_INIT(211), \ - _PyLong_DIGIT_INIT(212), \ - _PyLong_DIGIT_INIT(213), \ - _PyLong_DIGIT_INIT(214), \ - _PyLong_DIGIT_INIT(215), \ - _PyLong_DIGIT_INIT(216), \ - _PyLong_DIGIT_INIT(217), \ - _PyLong_DIGIT_INIT(218), \ - _PyLong_DIGIT_INIT(219), \ - _PyLong_DIGIT_INIT(220), \ - _PyLong_DIGIT_INIT(221), \ - _PyLong_DIGIT_INIT(222), \ - _PyLong_DIGIT_INIT(223), \ - _PyLong_DIGIT_INIT(224), \ - _PyLong_DIGIT_INIT(225), \ - _PyLong_DIGIT_INIT(226), \ - _PyLong_DIGIT_INIT(227), \ - _PyLong_DIGIT_INIT(228), \ - _PyLong_DIGIT_INIT(229), \ - _PyLong_DIGIT_INIT(230), \ - _PyLong_DIGIT_INIT(231), \ - _PyLong_DIGIT_INIT(232), \ - _PyLong_DIGIT_INIT(233), \ - _PyLong_DIGIT_INIT(234), \ - _PyLong_DIGIT_INIT(235), \ - 
_PyLong_DIGIT_INIT(236), \ - _PyLong_DIGIT_INIT(237), \ - _PyLong_DIGIT_INIT(238), \ - _PyLong_DIGIT_INIT(239), \ - _PyLong_DIGIT_INIT(240), \ - _PyLong_DIGIT_INIT(241), \ - _PyLong_DIGIT_INIT(242), \ - _PyLong_DIGIT_INIT(243), \ - _PyLong_DIGIT_INIT(244), \ - _PyLong_DIGIT_INIT(245), \ - _PyLong_DIGIT_INIT(246), \ - _PyLong_DIGIT_INIT(247), \ - _PyLong_DIGIT_INIT(248), \ - _PyLong_DIGIT_INIT(249), \ - _PyLong_DIGIT_INIT(250), \ - _PyLong_DIGIT_INIT(251), \ - _PyLong_DIGIT_INIT(252), \ - _PyLong_DIGIT_INIT(253), \ - _PyLong_DIGIT_INIT(254), \ - _PyLong_DIGIT_INIT(255), \ - _PyLong_DIGIT_INIT(256), \ - }, \ - \ - .bytes_empty = _PyBytes_SIMPLE_INIT(0, 0), \ - .bytes_characters = { \ - _PyBytes_CHAR_INIT(0), \ - _PyBytes_CHAR_INIT(1), \ - _PyBytes_CHAR_INIT(2), \ - _PyBytes_CHAR_INIT(3), \ - _PyBytes_CHAR_INIT(4), \ - _PyBytes_CHAR_INIT(5), \ - _PyBytes_CHAR_INIT(6), \ - _PyBytes_CHAR_INIT(7), \ - _PyBytes_CHAR_INIT(8), \ - _PyBytes_CHAR_INIT(9), \ - _PyBytes_CHAR_INIT(10), \ - _PyBytes_CHAR_INIT(11), \ - _PyBytes_CHAR_INIT(12), \ - _PyBytes_CHAR_INIT(13), \ - _PyBytes_CHAR_INIT(14), \ - _PyBytes_CHAR_INIT(15), \ - _PyBytes_CHAR_INIT(16), \ - _PyBytes_CHAR_INIT(17), \ - _PyBytes_CHAR_INIT(18), \ - _PyBytes_CHAR_INIT(19), \ - _PyBytes_CHAR_INIT(20), \ - _PyBytes_CHAR_INIT(21), \ - _PyBytes_CHAR_INIT(22), \ - _PyBytes_CHAR_INIT(23), \ - _PyBytes_CHAR_INIT(24), \ - _PyBytes_CHAR_INIT(25), \ - _PyBytes_CHAR_INIT(26), \ - _PyBytes_CHAR_INIT(27), \ - _PyBytes_CHAR_INIT(28), \ - _PyBytes_CHAR_INIT(29), \ - _PyBytes_CHAR_INIT(30), \ - _PyBytes_CHAR_INIT(31), \ - _PyBytes_CHAR_INIT(32), \ - _PyBytes_CHAR_INIT(33), \ - _PyBytes_CHAR_INIT(34), \ - _PyBytes_CHAR_INIT(35), \ - _PyBytes_CHAR_INIT(36), \ - _PyBytes_CHAR_INIT(37), \ - _PyBytes_CHAR_INIT(38), \ - _PyBytes_CHAR_INIT(39), \ - _PyBytes_CHAR_INIT(40), \ - _PyBytes_CHAR_INIT(41), \ - _PyBytes_CHAR_INIT(42), \ - _PyBytes_CHAR_INIT(43), \ - _PyBytes_CHAR_INIT(44), \ - _PyBytes_CHAR_INIT(45), \ - _PyBytes_CHAR_INIT(46), \ - _PyBytes_CHAR_INIT(47), \ - _PyBytes_CHAR_INIT(48), \ - _PyBytes_CHAR_INIT(49), \ - _PyBytes_CHAR_INIT(50), \ - _PyBytes_CHAR_INIT(51), \ - _PyBytes_CHAR_INIT(52), \ - _PyBytes_CHAR_INIT(53), \ - _PyBytes_CHAR_INIT(54), \ - _PyBytes_CHAR_INIT(55), \ - _PyBytes_CHAR_INIT(56), \ - _PyBytes_CHAR_INIT(57), \ - _PyBytes_CHAR_INIT(58), \ - _PyBytes_CHAR_INIT(59), \ - _PyBytes_CHAR_INIT(60), \ - _PyBytes_CHAR_INIT(61), \ - _PyBytes_CHAR_INIT(62), \ - _PyBytes_CHAR_INIT(63), \ - _PyBytes_CHAR_INIT(64), \ - _PyBytes_CHAR_INIT(65), \ - _PyBytes_CHAR_INIT(66), \ - _PyBytes_CHAR_INIT(67), \ - _PyBytes_CHAR_INIT(68), \ - _PyBytes_CHAR_INIT(69), \ - _PyBytes_CHAR_INIT(70), \ - _PyBytes_CHAR_INIT(71), \ - _PyBytes_CHAR_INIT(72), \ - _PyBytes_CHAR_INIT(73), \ - _PyBytes_CHAR_INIT(74), \ - _PyBytes_CHAR_INIT(75), \ - _PyBytes_CHAR_INIT(76), \ - _PyBytes_CHAR_INIT(77), \ - _PyBytes_CHAR_INIT(78), \ - _PyBytes_CHAR_INIT(79), \ - _PyBytes_CHAR_INIT(80), \ - _PyBytes_CHAR_INIT(81), \ - _PyBytes_CHAR_INIT(82), \ - _PyBytes_CHAR_INIT(83), \ - _PyBytes_CHAR_INIT(84), \ - _PyBytes_CHAR_INIT(85), \ - _PyBytes_CHAR_INIT(86), \ - _PyBytes_CHAR_INIT(87), \ - _PyBytes_CHAR_INIT(88), \ - _PyBytes_CHAR_INIT(89), \ - _PyBytes_CHAR_INIT(90), \ - _PyBytes_CHAR_INIT(91), \ - _PyBytes_CHAR_INIT(92), \ - _PyBytes_CHAR_INIT(93), \ - _PyBytes_CHAR_INIT(94), \ - _PyBytes_CHAR_INIT(95), \ - _PyBytes_CHAR_INIT(96), \ - _PyBytes_CHAR_INIT(97), \ - _PyBytes_CHAR_INIT(98), \ - _PyBytes_CHAR_INIT(99), \ - _PyBytes_CHAR_INIT(100), \ - _PyBytes_CHAR_INIT(101), \ - 
_PyBytes_CHAR_INIT(102), \ - _PyBytes_CHAR_INIT(103), \ - _PyBytes_CHAR_INIT(104), \ - _PyBytes_CHAR_INIT(105), \ - _PyBytes_CHAR_INIT(106), \ - _PyBytes_CHAR_INIT(107), \ - _PyBytes_CHAR_INIT(108), \ - _PyBytes_CHAR_INIT(109), \ - _PyBytes_CHAR_INIT(110), \ - _PyBytes_CHAR_INIT(111), \ - _PyBytes_CHAR_INIT(112), \ - _PyBytes_CHAR_INIT(113), \ - _PyBytes_CHAR_INIT(114), \ - _PyBytes_CHAR_INIT(115), \ - _PyBytes_CHAR_INIT(116), \ - _PyBytes_CHAR_INIT(117), \ - _PyBytes_CHAR_INIT(118), \ - _PyBytes_CHAR_INIT(119), \ - _PyBytes_CHAR_INIT(120), \ - _PyBytes_CHAR_INIT(121), \ - _PyBytes_CHAR_INIT(122), \ - _PyBytes_CHAR_INIT(123), \ - _PyBytes_CHAR_INIT(124), \ - _PyBytes_CHAR_INIT(125), \ - _PyBytes_CHAR_INIT(126), \ - _PyBytes_CHAR_INIT(127), \ - _PyBytes_CHAR_INIT(128), \ - _PyBytes_CHAR_INIT(129), \ - _PyBytes_CHAR_INIT(130), \ - _PyBytes_CHAR_INIT(131), \ - _PyBytes_CHAR_INIT(132), \ - _PyBytes_CHAR_INIT(133), \ - _PyBytes_CHAR_INIT(134), \ - _PyBytes_CHAR_INIT(135), \ - _PyBytes_CHAR_INIT(136), \ - _PyBytes_CHAR_INIT(137), \ - _PyBytes_CHAR_INIT(138), \ - _PyBytes_CHAR_INIT(139), \ - _PyBytes_CHAR_INIT(140), \ - _PyBytes_CHAR_INIT(141), \ - _PyBytes_CHAR_INIT(142), \ - _PyBytes_CHAR_INIT(143), \ - _PyBytes_CHAR_INIT(144), \ - _PyBytes_CHAR_INIT(145), \ - _PyBytes_CHAR_INIT(146), \ - _PyBytes_CHAR_INIT(147), \ - _PyBytes_CHAR_INIT(148), \ - _PyBytes_CHAR_INIT(149), \ - _PyBytes_CHAR_INIT(150), \ - _PyBytes_CHAR_INIT(151), \ - _PyBytes_CHAR_INIT(152), \ - _PyBytes_CHAR_INIT(153), \ - _PyBytes_CHAR_INIT(154), \ - _PyBytes_CHAR_INIT(155), \ - _PyBytes_CHAR_INIT(156), \ - _PyBytes_CHAR_INIT(157), \ - _PyBytes_CHAR_INIT(158), \ - _PyBytes_CHAR_INIT(159), \ - _PyBytes_CHAR_INIT(160), \ - _PyBytes_CHAR_INIT(161), \ - _PyBytes_CHAR_INIT(162), \ - _PyBytes_CHAR_INIT(163), \ - _PyBytes_CHAR_INIT(164), \ - _PyBytes_CHAR_INIT(165), \ - _PyBytes_CHAR_INIT(166), \ - _PyBytes_CHAR_INIT(167), \ - _PyBytes_CHAR_INIT(168), \ - _PyBytes_CHAR_INIT(169), \ - _PyBytes_CHAR_INIT(170), \ - _PyBytes_CHAR_INIT(171), \ - _PyBytes_CHAR_INIT(172), \ - _PyBytes_CHAR_INIT(173), \ - _PyBytes_CHAR_INIT(174), \ - _PyBytes_CHAR_INIT(175), \ - _PyBytes_CHAR_INIT(176), \ - _PyBytes_CHAR_INIT(177), \ - _PyBytes_CHAR_INIT(178), \ - _PyBytes_CHAR_INIT(179), \ - _PyBytes_CHAR_INIT(180), \ - _PyBytes_CHAR_INIT(181), \ - _PyBytes_CHAR_INIT(182), \ - _PyBytes_CHAR_INIT(183), \ - _PyBytes_CHAR_INIT(184), \ - _PyBytes_CHAR_INIT(185), \ - _PyBytes_CHAR_INIT(186), \ - _PyBytes_CHAR_INIT(187), \ - _PyBytes_CHAR_INIT(188), \ - _PyBytes_CHAR_INIT(189), \ - _PyBytes_CHAR_INIT(190), \ - _PyBytes_CHAR_INIT(191), \ - _PyBytes_CHAR_INIT(192), \ - _PyBytes_CHAR_INIT(193), \ - _PyBytes_CHAR_INIT(194), \ - _PyBytes_CHAR_INIT(195), \ - _PyBytes_CHAR_INIT(196), \ - _PyBytes_CHAR_INIT(197), \ - _PyBytes_CHAR_INIT(198), \ - _PyBytes_CHAR_INIT(199), \ - _PyBytes_CHAR_INIT(200), \ - _PyBytes_CHAR_INIT(201), \ - _PyBytes_CHAR_INIT(202), \ - _PyBytes_CHAR_INIT(203), \ - _PyBytes_CHAR_INIT(204), \ - _PyBytes_CHAR_INIT(205), \ - _PyBytes_CHAR_INIT(206), \ - _PyBytes_CHAR_INIT(207), \ - _PyBytes_CHAR_INIT(208), \ - _PyBytes_CHAR_INIT(209), \ - _PyBytes_CHAR_INIT(210), \ - _PyBytes_CHAR_INIT(211), \ - _PyBytes_CHAR_INIT(212), \ - _PyBytes_CHAR_INIT(213), \ - _PyBytes_CHAR_INIT(214), \ - _PyBytes_CHAR_INIT(215), \ - _PyBytes_CHAR_INIT(216), \ - _PyBytes_CHAR_INIT(217), \ - _PyBytes_CHAR_INIT(218), \ - _PyBytes_CHAR_INIT(219), \ - _PyBytes_CHAR_INIT(220), \ - _PyBytes_CHAR_INIT(221), \ - _PyBytes_CHAR_INIT(222), \ - _PyBytes_CHAR_INIT(223), \ - 
_PyBytes_CHAR_INIT(224), \ - _PyBytes_CHAR_INIT(225), \ - _PyBytes_CHAR_INIT(226), \ - _PyBytes_CHAR_INIT(227), \ - _PyBytes_CHAR_INIT(228), \ - _PyBytes_CHAR_INIT(229), \ - _PyBytes_CHAR_INIT(230), \ - _PyBytes_CHAR_INIT(231), \ - _PyBytes_CHAR_INIT(232), \ - _PyBytes_CHAR_INIT(233), \ - _PyBytes_CHAR_INIT(234), \ - _PyBytes_CHAR_INIT(235), \ - _PyBytes_CHAR_INIT(236), \ - _PyBytes_CHAR_INIT(237), \ - _PyBytes_CHAR_INIT(238), \ - _PyBytes_CHAR_INIT(239), \ - _PyBytes_CHAR_INIT(240), \ - _PyBytes_CHAR_INIT(241), \ - _PyBytes_CHAR_INIT(242), \ - _PyBytes_CHAR_INIT(243), \ - _PyBytes_CHAR_INIT(244), \ - _PyBytes_CHAR_INIT(245), \ - _PyBytes_CHAR_INIT(246), \ - _PyBytes_CHAR_INIT(247), \ - _PyBytes_CHAR_INIT(248), \ - _PyBytes_CHAR_INIT(249), \ - _PyBytes_CHAR_INIT(250), \ - _PyBytes_CHAR_INIT(251), \ - _PyBytes_CHAR_INIT(252), \ - _PyBytes_CHAR_INIT(253), \ - _PyBytes_CHAR_INIT(254), \ - _PyBytes_CHAR_INIT(255), \ - }, \ - \ - .strings = { \ - .literals = { \ - INIT_STR(anon_dictcomp, "<dictcomp>"), \ - INIT_STR(anon_genexpr, "<genexpr>"), \ - INIT_STR(anon_lambda, "<lambda>"), \ - INIT_STR(anon_listcomp, "<listcomp>"), \ - INIT_STR(anon_module, "<module>"), \ - INIT_STR(anon_setcomp, "<setcomp>"), \ - INIT_STR(anon_string, "<string>"), \ - INIT_STR(anon_unknown, "<unknown>"), \ - INIT_STR(close_br, "}"), \ - INIT_STR(comma_sep, ", "), \ - INIT_STR(dbl_close_br, "}}"), \ - INIT_STR(dbl_open_br, "{{"), \ - INIT_STR(dbl_percent, "%%"), \ - INIT_STR(dot, "."), \ - INIT_STR(dot_locals, ".<locals>"), \ - INIT_STR(empty, ""), \ - INIT_STR(list_err, "list index out of range"), \ - INIT_STR(newline, "\n"), \ - INIT_STR(open_br, "{"), \ - INIT_STR(percent, "%"), \ - INIT_STR(utf_8, "utf-8"), \ - }, \ - .identifiers = { \ - INIT_ID(False), \ - INIT_ID(Py_Repr), \ - INIT_ID(TextIOWrapper), \ - INIT_ID(True), \ - INIT_ID(WarningMessage), \ - INIT_ID(_), \ - INIT_ID(__IOBase_closed), \ - INIT_ID(__abc_tpflags__), \ - INIT_ID(__abs__), \ - INIT_ID(__abstractmethods__), \ - INIT_ID(__add__), \ - INIT_ID(__aenter__), \ - INIT_ID(__aexit__), \ - INIT_ID(__aiter__), \ - INIT_ID(__all__), \ - INIT_ID(__and__), \ - INIT_ID(__anext__), \ - INIT_ID(__annotations__), \ - INIT_ID(__args__), \ - INIT_ID(__await__), \ - INIT_ID(__bases__), \ - INIT_ID(__bool__), \ - INIT_ID(__build_class__), \ - INIT_ID(__builtins__), \ - INIT_ID(__bytes__), \ - INIT_ID(__call__), \ - INIT_ID(__cantrace__), \ - INIT_ID(__class__), \ - INIT_ID(__class_getitem__), \ - INIT_ID(__classcell__), \ - INIT_ID(__complex__), \ - INIT_ID(__contains__), \ - INIT_ID(__copy__), \ - INIT_ID(__del__), \ - INIT_ID(__delattr__), \ - INIT_ID(__delete__), \ - INIT_ID(__delitem__), \ - INIT_ID(__dict__), \ - INIT_ID(__dictoffset__), \ - INIT_ID(__dir__), \ - INIT_ID(__divmod__), \ - INIT_ID(__doc__), \ - INIT_ID(__enter__), \ - INIT_ID(__eq__), \ - INIT_ID(__exit__), \ - INIT_ID(__file__), \ - INIT_ID(__float__), \ - INIT_ID(__floordiv__), \ - INIT_ID(__format__), \ - INIT_ID(__fspath__), \ - INIT_ID(__ge__), \ - INIT_ID(__get__), \ - INIT_ID(__getattr__), \ - INIT_ID(__getattribute__), \ - INIT_ID(__getinitargs__), \ - INIT_ID(__getitem__), \ - INIT_ID(__getnewargs__), \ - INIT_ID(__getnewargs_ex__), \ - INIT_ID(__getstate__), \ - INIT_ID(__gt__), \ - INIT_ID(__hash__), \ - INIT_ID(__iadd__), \ - INIT_ID(__iand__), \ - INIT_ID(__ifloordiv__), \ - INIT_ID(__ilshift__), \ - INIT_ID(__imatmul__), \ - INIT_ID(__imod__), \ - INIT_ID(__import__), \ - INIT_ID(__imul__), \ - INIT_ID(__index__), \ - INIT_ID(__init__), \ - INIT_ID(__init_subclass__), \ - 
INIT_ID(__instancecheck__), \ - INIT_ID(__int__), \ - INIT_ID(__invert__), \ - INIT_ID(__ior__), \ - INIT_ID(__ipow__), \ - INIT_ID(__irshift__), \ - INIT_ID(__isabstractmethod__), \ - INIT_ID(__isub__), \ - INIT_ID(__iter__), \ - INIT_ID(__itruediv__), \ - INIT_ID(__ixor__), \ - INIT_ID(__le__), \ - INIT_ID(__len__), \ - INIT_ID(__length_hint__), \ - INIT_ID(__lltrace__), \ - INIT_ID(__loader__), \ - INIT_ID(__lshift__), \ - INIT_ID(__lt__), \ - INIT_ID(__main__), \ - INIT_ID(__matmul__), \ - INIT_ID(__missing__), \ - INIT_ID(__mod__), \ - INIT_ID(__module__), \ - INIT_ID(__mro_entries__), \ - INIT_ID(__mul__), \ - INIT_ID(__name__), \ - INIT_ID(__ne__), \ - INIT_ID(__neg__), \ - INIT_ID(__new__), \ - INIT_ID(__newobj__), \ - INIT_ID(__newobj_ex__), \ - INIT_ID(__next__), \ - INIT_ID(__notes__), \ - INIT_ID(__or__), \ - INIT_ID(__orig_class__), \ - INIT_ID(__origin__), \ - INIT_ID(__package__), \ - INIT_ID(__parameters__), \ - INIT_ID(__path__), \ - INIT_ID(__pos__), \ - INIT_ID(__pow__), \ - INIT_ID(__prepare__), \ - INIT_ID(__qualname__), \ - INIT_ID(__radd__), \ - INIT_ID(__rand__), \ - INIT_ID(__rdivmod__), \ - INIT_ID(__reduce__), \ - INIT_ID(__reduce_ex__), \ - INIT_ID(__repr__), \ - INIT_ID(__reversed__), \ - INIT_ID(__rfloordiv__), \ - INIT_ID(__rlshift__), \ - INIT_ID(__rmatmul__), \ - INIT_ID(__rmod__), \ - INIT_ID(__rmul__), \ - INIT_ID(__ror__), \ - INIT_ID(__round__), \ - INIT_ID(__rpow__), \ - INIT_ID(__rrshift__), \ - INIT_ID(__rshift__), \ - INIT_ID(__rsub__), \ - INIT_ID(__rtruediv__), \ - INIT_ID(__rxor__), \ - INIT_ID(__set__), \ - INIT_ID(__set_name__), \ - INIT_ID(__setattr__), \ - INIT_ID(__setitem__), \ - INIT_ID(__setstate__), \ - INIT_ID(__sizeof__), \ - INIT_ID(__slotnames__), \ - INIT_ID(__slots__), \ - INIT_ID(__spec__), \ - INIT_ID(__str__), \ - INIT_ID(__sub__), \ - INIT_ID(__subclasscheck__), \ - INIT_ID(__subclasshook__), \ - INIT_ID(__truediv__), \ - INIT_ID(__trunc__), \ - INIT_ID(__typing_is_unpacked_typevartuple__), \ - INIT_ID(__typing_prepare_subst__), \ - INIT_ID(__typing_subst__), \ - INIT_ID(__typing_unpacked_tuple_args__), \ - INIT_ID(__warningregistry__), \ - INIT_ID(__weaklistoffset__), \ - INIT_ID(__weakref__), \ - INIT_ID(__xor__), \ - INIT_ID(_abc_impl), \ - INIT_ID(_annotation), \ - INIT_ID(_blksize), \ - INIT_ID(_bootstrap), \ - INIT_ID(_dealloc_warn), \ - INIT_ID(_feature_version), \ - INIT_ID(_finalizing), \ - INIT_ID(_find_and_load), \ - INIT_ID(_fix_up_module), \ - INIT_ID(_get_sourcefile), \ - INIT_ID(_handle_fromlist), \ - INIT_ID(_initializing), \ - INIT_ID(_is_text_encoding), \ - INIT_ID(_lock_unlock_module), \ - INIT_ID(_showwarnmsg), \ - INIT_ID(_shutdown), \ - INIT_ID(_slotnames), \ - INIT_ID(_uninitialized_submodules), \ - INIT_ID(_warn_unawaited_coroutine), \ - INIT_ID(_xoptions), \ - INIT_ID(a), \ - INIT_ID(abs_tol), \ - INIT_ID(access), \ - INIT_ID(add), \ - INIT_ID(after_in_child), \ - INIT_ID(after_in_parent), \ - INIT_ID(aggregate_class), \ - INIT_ID(append), \ - INIT_ID(argdefs), \ - INIT_ID(arguments), \ - INIT_ID(argv), \ - INIT_ID(attribute), \ - INIT_ID(authorizer_callback), \ - INIT_ID(b), \ - INIT_ID(backtick), \ - INIT_ID(base), \ - INIT_ID(before), \ - INIT_ID(big), \ - INIT_ID(binary_form), \ - INIT_ID(block), \ - INIT_ID(buffer), \ - INIT_ID(buffer_callback), \ - INIT_ID(buffer_size), \ - INIT_ID(buffering), \ - INIT_ID(buffers), \ - INIT_ID(bufsize), \ - INIT_ID(builtins), \ - INIT_ID(byteorder), \ - INIT_ID(bytes), \ - INIT_ID(bytes_per_sep), \ - INIT_ID(c_call), \ - INIT_ID(c_exception), \ - 
INIT_ID(c_return), \ - INIT_ID(cached_statements), \ - INIT_ID(cadata), \ - INIT_ID(cafile), \ - INIT_ID(call), \ - INIT_ID(capath), \ - INIT_ID(category), \ - INIT_ID(cb_type), \ - INIT_ID(certfile), \ - INIT_ID(check_same_thread), \ - INIT_ID(clear), \ - INIT_ID(close), \ - INIT_ID(closed), \ - INIT_ID(closefd), \ - INIT_ID(closure), \ - INIT_ID(co_argcount), \ - INIT_ID(co_cellvars), \ - INIT_ID(co_code), \ - INIT_ID(co_consts), \ - INIT_ID(co_exceptiontable), \ - INIT_ID(co_filename), \ - INIT_ID(co_firstlineno), \ - INIT_ID(co_flags), \ - INIT_ID(co_freevars), \ - INIT_ID(co_kwonlyargcount), \ - INIT_ID(co_linetable), \ - INIT_ID(co_name), \ - INIT_ID(co_names), \ - INIT_ID(co_nlocals), \ - INIT_ID(co_posonlyargcount), \ - INIT_ID(co_qualname), \ - INIT_ID(co_stacksize), \ - INIT_ID(co_varnames), \ - INIT_ID(code), \ - INIT_ID(command), \ - INIT_ID(comment_factory), \ - INIT_ID(consts), \ - INIT_ID(context), \ - INIT_ID(cookie), \ - INIT_ID(copy), \ - INIT_ID(copyreg), \ - INIT_ID(coro), \ - INIT_ID(count), \ - INIT_ID(cwd), \ - INIT_ID(data), \ - INIT_ID(database), \ - INIT_ID(decode), \ - INIT_ID(decoder), \ - INIT_ID(default), \ - INIT_ID(defaultaction), \ - INIT_ID(delete), \ - INIT_ID(depth), \ - INIT_ID(detect_types), \ - INIT_ID(deterministic), \ - INIT_ID(device), \ - INIT_ID(dict), \ - INIT_ID(dictcomp), \ - INIT_ID(difference_update), \ - INIT_ID(digest), \ - INIT_ID(digest_size), \ - INIT_ID(digestmod), \ - INIT_ID(dir_fd), \ - INIT_ID(dispatch_table), \ - INIT_ID(displayhook), \ - INIT_ID(dklen), \ - INIT_ID(doc), \ - INIT_ID(dont_inherit), \ - INIT_ID(dst), \ - INIT_ID(dst_dir_fd), \ - INIT_ID(duration), \ - INIT_ID(effective_ids), \ - INIT_ID(element_factory), \ - INIT_ID(encode), \ - INIT_ID(encoding), \ - INIT_ID(end), \ - INIT_ID(end_lineno), \ - INIT_ID(end_offset), \ - INIT_ID(endpos), \ - INIT_ID(env), \ - INIT_ID(errors), \ - INIT_ID(event), \ - INIT_ID(eventmask), \ - INIT_ID(exc_type), \ - INIT_ID(exc_value), \ - INIT_ID(excepthook), \ - INIT_ID(exception), \ - INIT_ID(exp), \ - INIT_ID(extend), \ - INIT_ID(facility), \ - INIT_ID(factory), \ - INIT_ID(family), \ - INIT_ID(fanout), \ - INIT_ID(fd), \ - INIT_ID(fd2), \ - INIT_ID(fdel), \ - INIT_ID(fget), \ - INIT_ID(file), \ - INIT_ID(file_actions), \ - INIT_ID(filename), \ - INIT_ID(fileno), \ - INIT_ID(filepath), \ - INIT_ID(fillvalue), \ - INIT_ID(filters), \ - INIT_ID(final), \ - INIT_ID(find_class), \ - INIT_ID(fix_imports), \ - INIT_ID(flags), \ - INIT_ID(flush), \ - INIT_ID(follow_symlinks), \ - INIT_ID(format), \ - INIT_ID(frequency), \ - INIT_ID(fromlist), \ - INIT_ID(fset), \ - INIT_ID(func), \ - INIT_ID(generation), \ - INIT_ID(genexpr), \ - INIT_ID(get), \ - INIT_ID(get_source), \ - INIT_ID(getattr), \ - INIT_ID(getstate), \ - INIT_ID(gid), \ - INIT_ID(globals), \ - INIT_ID(groupindex), \ - INIT_ID(groups), \ - INIT_ID(handle), \ - INIT_ID(hash_name), \ - INIT_ID(header), \ - INIT_ID(headers), \ - INIT_ID(hi), \ - INIT_ID(hook), \ - INIT_ID(id), \ - INIT_ID(ident), \ - INIT_ID(ignore), \ - INIT_ID(imag), \ - INIT_ID(importlib), \ - INIT_ID(in_fd), \ - INIT_ID(incoming), \ - INIT_ID(indexgroup), \ - INIT_ID(inf), \ - INIT_ID(inheritable), \ - INIT_ID(initial), \ - INIT_ID(initial_bytes), \ - INIT_ID(initial_value), \ - INIT_ID(initval), \ - INIT_ID(inner_size), \ - INIT_ID(input), \ - INIT_ID(insert_comments), \ - INIT_ID(insert_pis), \ - INIT_ID(instructions), \ - INIT_ID(intern), \ - INIT_ID(intersection), \ - INIT_ID(isatty), \ - INIT_ID(isinstance), \ - INIT_ID(isolation_level), \ - INIT_ID(istext), 
\ - INIT_ID(item), \ - INIT_ID(items), \ - INIT_ID(iter), \ - INIT_ID(iterable), \ - INIT_ID(iterations), \ - INIT_ID(join), \ - INIT_ID(jump), \ - INIT_ID(keepends), \ - INIT_ID(key), \ - INIT_ID(keyfile), \ - INIT_ID(keys), \ - INIT_ID(kind), \ - INIT_ID(lambda), \ - INIT_ID(last), \ - INIT_ID(last_node), \ - INIT_ID(last_traceback), \ - INIT_ID(last_type), \ - INIT_ID(last_value), \ - INIT_ID(latin1), \ - INIT_ID(leaf_size), \ - INIT_ID(len), \ - INIT_ID(length), \ - INIT_ID(level), \ - INIT_ID(limit), \ - INIT_ID(line), \ - INIT_ID(line_buffering), \ - INIT_ID(lineno), \ - INIT_ID(listcomp), \ - INIT_ID(little), \ - INIT_ID(lo), \ - INIT_ID(locale), \ - INIT_ID(locals), \ - INIT_ID(logoption), \ - INIT_ID(loop), \ - INIT_ID(mapping), \ - INIT_ID(match), \ - INIT_ID(max_length), \ - INIT_ID(maxdigits), \ - INIT_ID(maxevents), \ - INIT_ID(maxmem), \ - INIT_ID(maxsplit), \ - INIT_ID(maxvalue), \ - INIT_ID(memLevel), \ - INIT_ID(memlimit), \ - INIT_ID(message), \ - INIT_ID(metaclass), \ - INIT_ID(method), \ - INIT_ID(mod), \ - INIT_ID(mode), \ - INIT_ID(module), \ - INIT_ID(module_globals), \ - INIT_ID(modules), \ - INIT_ID(mro), \ - INIT_ID(msg), \ - INIT_ID(mycmp), \ - INIT_ID(n), \ - INIT_ID(n_arg), \ - INIT_ID(n_fields), \ - INIT_ID(n_sequence_fields), \ - INIT_ID(n_unnamed_fields), \ - INIT_ID(name), \ - INIT_ID(name_from), \ - INIT_ID(namespace_separator), \ - INIT_ID(namespaces), \ - INIT_ID(narg), \ - INIT_ID(ndigits), \ - INIT_ID(new_limit), \ - INIT_ID(newline), \ - INIT_ID(newlines), \ - INIT_ID(next), \ - INIT_ID(node_depth), \ - INIT_ID(node_offset), \ - INIT_ID(ns), \ - INIT_ID(nstype), \ - INIT_ID(number), \ - INIT_ID(obj), \ - INIT_ID(object), \ - INIT_ID(offset), \ - INIT_ID(offset_dst), \ - INIT_ID(offset_src), \ - INIT_ID(on_type_read), \ - INIT_ID(onceregistry), \ - INIT_ID(only_keys), \ - INIT_ID(oparg), \ - INIT_ID(opcode), \ - INIT_ID(open), \ - INIT_ID(opener), \ - INIT_ID(operation), \ - INIT_ID(optimize), \ - INIT_ID(options), \ - INIT_ID(order), \ - INIT_ID(out_fd), \ - INIT_ID(outgoing), \ - INIT_ID(overlapped), \ - INIT_ID(owner), \ - INIT_ID(p), \ - INIT_ID(pages), \ - INIT_ID(parent), \ - INIT_ID(password), \ - INIT_ID(path), \ - INIT_ID(pattern), \ - INIT_ID(peek), \ - INIT_ID(persistent_id), \ - INIT_ID(persistent_load), \ - INIT_ID(person), \ - INIT_ID(pi_factory), \ - INIT_ID(pid), \ - INIT_ID(policy), \ - INIT_ID(pos), \ - INIT_ID(print_file_and_line), \ - INIT_ID(priority), \ - INIT_ID(progress), \ - INIT_ID(progress_handler), \ - INIT_ID(proto), \ - INIT_ID(protocol), \ - INIT_ID(ps1), \ - INIT_ID(ps2), \ - INIT_ID(query), \ - INIT_ID(quotetabs), \ - INIT_ID(r), \ - INIT_ID(raw), \ - INIT_ID(read), \ - INIT_ID(read1), \ - INIT_ID(readable), \ - INIT_ID(readall), \ - INIT_ID(readinto), \ - INIT_ID(readinto1), \ - INIT_ID(readline), \ - INIT_ID(readonly), \ - INIT_ID(real), \ - INIT_ID(reducer_override), \ - INIT_ID(registry), \ - INIT_ID(rel_tol), \ - INIT_ID(reload), \ - INIT_ID(repl), \ - INIT_ID(replace), \ - INIT_ID(reserved), \ - INIT_ID(reset), \ - INIT_ID(resetids), \ - INIT_ID(return), \ - INIT_ID(reverse), \ - INIT_ID(reversed), \ - INIT_ID(s), \ - INIT_ID(salt), \ - INIT_ID(sched_priority), \ - INIT_ID(scheduler), \ - INIT_ID(seek), \ - INIT_ID(seekable), \ - INIT_ID(selectors), \ - INIT_ID(send), \ - INIT_ID(sep), \ - INIT_ID(sequence), \ - INIT_ID(server_hostname), \ - INIT_ID(server_side), \ - INIT_ID(session), \ - INIT_ID(setcomp), \ - INIT_ID(setpgroup), \ - INIT_ID(setsid), \ - INIT_ID(setsigdef), \ - INIT_ID(setsigmask), \ - 
INIT_ID(setstate), \ - INIT_ID(shape), \ - INIT_ID(show_cmd), \ - INIT_ID(signed), \ - INIT_ID(size), \ - INIT_ID(sizehint), \ - INIT_ID(sleep), \ - INIT_ID(sock), \ - INIT_ID(sort), \ - INIT_ID(sound), \ - INIT_ID(source), \ - INIT_ID(src), \ - INIT_ID(src_dir_fd), \ - INIT_ID(stacklevel), \ - INIT_ID(start), \ - INIT_ID(statement), \ - INIT_ID(status), \ - INIT_ID(stderr), \ - INIT_ID(stdin), \ - INIT_ID(stdout), \ - INIT_ID(step), \ - INIT_ID(store_name), \ - INIT_ID(strategy), \ - INIT_ID(strict), \ - INIT_ID(strict_mode), \ - INIT_ID(string), \ - INIT_ID(sub_key), \ - INIT_ID(symmetric_difference_update), \ - INIT_ID(tabsize), \ - INIT_ID(tag), \ - INIT_ID(target), \ - INIT_ID(target_is_directory), \ - INIT_ID(task), \ - INIT_ID(tb_frame), \ - INIT_ID(tb_lasti), \ - INIT_ID(tb_lineno), \ - INIT_ID(tb_next), \ - INIT_ID(tell), \ - INIT_ID(template), \ - INIT_ID(term), \ - INIT_ID(text), \ - INIT_ID(threading), \ - INIT_ID(throw), \ - INIT_ID(timeout), \ - INIT_ID(times), \ - INIT_ID(top), \ - INIT_ID(trace_callback), \ - INIT_ID(traceback), \ - INIT_ID(trailers), \ - INIT_ID(translate), \ - INIT_ID(truncate), \ - INIT_ID(twice), \ - INIT_ID(txt), \ - INIT_ID(type), \ - INIT_ID(tz), \ - INIT_ID(uid), \ - INIT_ID(unlink), \ - INIT_ID(unraisablehook), \ - INIT_ID(uri), \ - INIT_ID(usedforsecurity), \ - INIT_ID(value), \ - INIT_ID(values), \ - INIT_ID(version), \ - INIT_ID(warnings), \ - INIT_ID(warnoptions), \ - INIT_ID(wbits), \ - INIT_ID(week), \ - INIT_ID(weekday), \ - INIT_ID(which), \ - INIT_ID(who), \ - INIT_ID(withdata), \ - INIT_ID(writable), \ - INIT_ID(write), \ - INIT_ID(write_through), \ - INIT_ID(x), \ - INIT_ID(year), \ - INIT_ID(zdict), \ - }, \ - .ascii = { \ - _PyASCIIObject_INIT("\x00"), \ - _PyASCIIObject_INIT("\x01"), \ - _PyASCIIObject_INIT("\x02"), \ - _PyASCIIObject_INIT("\x03"), \ - _PyASCIIObject_INIT("\x04"), \ - _PyASCIIObject_INIT("\x05"), \ - _PyASCIIObject_INIT("\x06"), \ - _PyASCIIObject_INIT("\x07"), \ - _PyASCIIObject_INIT("\x08"), \ - _PyASCIIObject_INIT("\x09"), \ - _PyASCIIObject_INIT("\x0a"), \ - _PyASCIIObject_INIT("\x0b"), \ - _PyASCIIObject_INIT("\x0c"), \ - _PyASCIIObject_INIT("\x0d"), \ - _PyASCIIObject_INIT("\x0e"), \ - _PyASCIIObject_INIT("\x0f"), \ - _PyASCIIObject_INIT("\x10"), \ - _PyASCIIObject_INIT("\x11"), \ - _PyASCIIObject_INIT("\x12"), \ - _PyASCIIObject_INIT("\x13"), \ - _PyASCIIObject_INIT("\x14"), \ - _PyASCIIObject_INIT("\x15"), \ - _PyASCIIObject_INIT("\x16"), \ - _PyASCIIObject_INIT("\x17"), \ - _PyASCIIObject_INIT("\x18"), \ - _PyASCIIObject_INIT("\x19"), \ - _PyASCIIObject_INIT("\x1a"), \ - _PyASCIIObject_INIT("\x1b"), \ - _PyASCIIObject_INIT("\x1c"), \ - _PyASCIIObject_INIT("\x1d"), \ - _PyASCIIObject_INIT("\x1e"), \ - _PyASCIIObject_INIT("\x1f"), \ - _PyASCIIObject_INIT("\x20"), \ - _PyASCIIObject_INIT("\x21"), \ - _PyASCIIObject_INIT("\x22"), \ - _PyASCIIObject_INIT("\x23"), \ - _PyASCIIObject_INIT("\x24"), \ - _PyASCIIObject_INIT("\x25"), \ - _PyASCIIObject_INIT("\x26"), \ - _PyASCIIObject_INIT("\x27"), \ - _PyASCIIObject_INIT("\x28"), \ - _PyASCIIObject_INIT("\x29"), \ - _PyASCIIObject_INIT("\x2a"), \ - _PyASCIIObject_INIT("\x2b"), \ - _PyASCIIObject_INIT("\x2c"), \ - _PyASCIIObject_INIT("\x2d"), \ - _PyASCIIObject_INIT("\x2e"), \ - _PyASCIIObject_INIT("\x2f"), \ - _PyASCIIObject_INIT("\x30"), \ - _PyASCIIObject_INIT("\x31"), \ - _PyASCIIObject_INIT("\x32"), \ - _PyASCIIObject_INIT("\x33"), \ - _PyASCIIObject_INIT("\x34"), \ - _PyASCIIObject_INIT("\x35"), \ - _PyASCIIObject_INIT("\x36"), \ - _PyASCIIObject_INIT("\x37"), \ - 
_PyASCIIObject_INIT("\x38"), \ - _PyASCIIObject_INIT("\x39"), \ - _PyASCIIObject_INIT("\x3a"), \ - _PyASCIIObject_INIT("\x3b"), \ - _PyASCIIObject_INIT("\x3c"), \ - _PyASCIIObject_INIT("\x3d"), \ - _PyASCIIObject_INIT("\x3e"), \ - _PyASCIIObject_INIT("\x3f"), \ - _PyASCIIObject_INIT("\x40"), \ - _PyASCIIObject_INIT("\x41"), \ - _PyASCIIObject_INIT("\x42"), \ - _PyASCIIObject_INIT("\x43"), \ - _PyASCIIObject_INIT("\x44"), \ - _PyASCIIObject_INIT("\x45"), \ - _PyASCIIObject_INIT("\x46"), \ - _PyASCIIObject_INIT("\x47"), \ - _PyASCIIObject_INIT("\x48"), \ - _PyASCIIObject_INIT("\x49"), \ - _PyASCIIObject_INIT("\x4a"), \ - _PyASCIIObject_INIT("\x4b"), \ - _PyASCIIObject_INIT("\x4c"), \ - _PyASCIIObject_INIT("\x4d"), \ - _PyASCIIObject_INIT("\x4e"), \ - _PyASCIIObject_INIT("\x4f"), \ - _PyASCIIObject_INIT("\x50"), \ - _PyASCIIObject_INIT("\x51"), \ - _PyASCIIObject_INIT("\x52"), \ - _PyASCIIObject_INIT("\x53"), \ - _PyASCIIObject_INIT("\x54"), \ - _PyASCIIObject_INIT("\x55"), \ - _PyASCIIObject_INIT("\x56"), \ - _PyASCIIObject_INIT("\x57"), \ - _PyASCIIObject_INIT("\x58"), \ - _PyASCIIObject_INIT("\x59"), \ - _PyASCIIObject_INIT("\x5a"), \ - _PyASCIIObject_INIT("\x5b"), \ - _PyASCIIObject_INIT("\x5c"), \ - _PyASCIIObject_INIT("\x5d"), \ - _PyASCIIObject_INIT("\x5e"), \ - _PyASCIIObject_INIT("\x5f"), \ - _PyASCIIObject_INIT("\x60"), \ - _PyASCIIObject_INIT("\x61"), \ - _PyASCIIObject_INIT("\x62"), \ - _PyASCIIObject_INIT("\x63"), \ - _PyASCIIObject_INIT("\x64"), \ - _PyASCIIObject_INIT("\x65"), \ - _PyASCIIObject_INIT("\x66"), \ - _PyASCIIObject_INIT("\x67"), \ - _PyASCIIObject_INIT("\x68"), \ - _PyASCIIObject_INIT("\x69"), \ - _PyASCIIObject_INIT("\x6a"), \ - _PyASCIIObject_INIT("\x6b"), \ - _PyASCIIObject_INIT("\x6c"), \ - _PyASCIIObject_INIT("\x6d"), \ - _PyASCIIObject_INIT("\x6e"), \ - _PyASCIIObject_INIT("\x6f"), \ - _PyASCIIObject_INIT("\x70"), \ - _PyASCIIObject_INIT("\x71"), \ - _PyASCIIObject_INIT("\x72"), \ - _PyASCIIObject_INIT("\x73"), \ - _PyASCIIObject_INIT("\x74"), \ - _PyASCIIObject_INIT("\x75"), \ - _PyASCIIObject_INIT("\x76"), \ - _PyASCIIObject_INIT("\x77"), \ - _PyASCIIObject_INIT("\x78"), \ - _PyASCIIObject_INIT("\x79"), \ - _PyASCIIObject_INIT("\x7a"), \ - _PyASCIIObject_INIT("\x7b"), \ - _PyASCIIObject_INIT("\x7c"), \ - _PyASCIIObject_INIT("\x7d"), \ - _PyASCIIObject_INIT("\x7e"), \ - _PyASCIIObject_INIT("\x7f"), \ - }, \ - .latin1 = { \ - _PyUnicode_LATIN1_INIT("\x80", "\xc2\x80"), \ - _PyUnicode_LATIN1_INIT("\x81", "\xc2\x81"), \ - _PyUnicode_LATIN1_INIT("\x82", "\xc2\x82"), \ - _PyUnicode_LATIN1_INIT("\x83", "\xc2\x83"), \ - _PyUnicode_LATIN1_INIT("\x84", "\xc2\x84"), \ - _PyUnicode_LATIN1_INIT("\x85", "\xc2\x85"), \ - _PyUnicode_LATIN1_INIT("\x86", "\xc2\x86"), \ - _PyUnicode_LATIN1_INIT("\x87", "\xc2\x87"), \ - _PyUnicode_LATIN1_INIT("\x88", "\xc2\x88"), \ - _PyUnicode_LATIN1_INIT("\x89", "\xc2\x89"), \ - _PyUnicode_LATIN1_INIT("\x8a", "\xc2\x8a"), \ - _PyUnicode_LATIN1_INIT("\x8b", "\xc2\x8b"), \ - _PyUnicode_LATIN1_INIT("\x8c", "\xc2\x8c"), \ - _PyUnicode_LATIN1_INIT("\x8d", "\xc2\x8d"), \ - _PyUnicode_LATIN1_INIT("\x8e", "\xc2\x8e"), \ - _PyUnicode_LATIN1_INIT("\x8f", "\xc2\x8f"), \ - _PyUnicode_LATIN1_INIT("\x90", "\xc2\x90"), \ - _PyUnicode_LATIN1_INIT("\x91", "\xc2\x91"), \ - _PyUnicode_LATIN1_INIT("\x92", "\xc2\x92"), \ - _PyUnicode_LATIN1_INIT("\x93", "\xc2\x93"), \ - _PyUnicode_LATIN1_INIT("\x94", "\xc2\x94"), \ - _PyUnicode_LATIN1_INIT("\x95", "\xc2\x95"), \ - _PyUnicode_LATIN1_INIT("\x96", "\xc2\x96"), \ - _PyUnicode_LATIN1_INIT("\x97", "\xc2\x97"), \ - 
_PyUnicode_LATIN1_INIT("\x98", "\xc2\x98"), \ - _PyUnicode_LATIN1_INIT("\x99", "\xc2\x99"), \ - _PyUnicode_LATIN1_INIT("\x9a", "\xc2\x9a"), \ - _PyUnicode_LATIN1_INIT("\x9b", "\xc2\x9b"), \ - _PyUnicode_LATIN1_INIT("\x9c", "\xc2\x9c"), \ - _PyUnicode_LATIN1_INIT("\x9d", "\xc2\x9d"), \ - _PyUnicode_LATIN1_INIT("\x9e", "\xc2\x9e"), \ - _PyUnicode_LATIN1_INIT("\x9f", "\xc2\x9f"), \ - _PyUnicode_LATIN1_INIT("\xa0", "\xc2\xa0"), \ - _PyUnicode_LATIN1_INIT("\xa1", "\xc2\xa1"), \ - _PyUnicode_LATIN1_INIT("\xa2", "\xc2\xa2"), \ - _PyUnicode_LATIN1_INIT("\xa3", "\xc2\xa3"), \ - _PyUnicode_LATIN1_INIT("\xa4", "\xc2\xa4"), \ - _PyUnicode_LATIN1_INIT("\xa5", "\xc2\xa5"), \ - _PyUnicode_LATIN1_INIT("\xa6", "\xc2\xa6"), \ - _PyUnicode_LATIN1_INIT("\xa7", "\xc2\xa7"), \ - _PyUnicode_LATIN1_INIT("\xa8", "\xc2\xa8"), \ - _PyUnicode_LATIN1_INIT("\xa9", "\xc2\xa9"), \ - _PyUnicode_LATIN1_INIT("\xaa", "\xc2\xaa"), \ - _PyUnicode_LATIN1_INIT("\xab", "\xc2\xab"), \ - _PyUnicode_LATIN1_INIT("\xac", "\xc2\xac"), \ - _PyUnicode_LATIN1_INIT("\xad", "\xc2\xad"), \ - _PyUnicode_LATIN1_INIT("\xae", "\xc2\xae"), \ - _PyUnicode_LATIN1_INIT("\xaf", "\xc2\xaf"), \ - _PyUnicode_LATIN1_INIT("\xb0", "\xc2\xb0"), \ - _PyUnicode_LATIN1_INIT("\xb1", "\xc2\xb1"), \ - _PyUnicode_LATIN1_INIT("\xb2", "\xc2\xb2"), \ - _PyUnicode_LATIN1_INIT("\xb3", "\xc2\xb3"), \ - _PyUnicode_LATIN1_INIT("\xb4", "\xc2\xb4"), \ - _PyUnicode_LATIN1_INIT("\xb5", "\xc2\xb5"), \ - _PyUnicode_LATIN1_INIT("\xb6", "\xc2\xb6"), \ - _PyUnicode_LATIN1_INIT("\xb7", "\xc2\xb7"), \ - _PyUnicode_LATIN1_INIT("\xb8", "\xc2\xb8"), \ - _PyUnicode_LATIN1_INIT("\xb9", "\xc2\xb9"), \ - _PyUnicode_LATIN1_INIT("\xba", "\xc2\xba"), \ - _PyUnicode_LATIN1_INIT("\xbb", "\xc2\xbb"), \ - _PyUnicode_LATIN1_INIT("\xbc", "\xc2\xbc"), \ - _PyUnicode_LATIN1_INIT("\xbd", "\xc2\xbd"), \ - _PyUnicode_LATIN1_INIT("\xbe", "\xc2\xbe"), \ - _PyUnicode_LATIN1_INIT("\xbf", "\xc2\xbf"), \ - _PyUnicode_LATIN1_INIT("\xc0", "\xc3\x80"), \ - _PyUnicode_LATIN1_INIT("\xc1", "\xc3\x81"), \ - _PyUnicode_LATIN1_INIT("\xc2", "\xc3\x82"), \ - _PyUnicode_LATIN1_INIT("\xc3", "\xc3\x83"), \ - _PyUnicode_LATIN1_INIT("\xc4", "\xc3\x84"), \ - _PyUnicode_LATIN1_INIT("\xc5", "\xc3\x85"), \ - _PyUnicode_LATIN1_INIT("\xc6", "\xc3\x86"), \ - _PyUnicode_LATIN1_INIT("\xc7", "\xc3\x87"), \ - _PyUnicode_LATIN1_INIT("\xc8", "\xc3\x88"), \ - _PyUnicode_LATIN1_INIT("\xc9", "\xc3\x89"), \ - _PyUnicode_LATIN1_INIT("\xca", "\xc3\x8a"), \ - _PyUnicode_LATIN1_INIT("\xcb", "\xc3\x8b"), \ - _PyUnicode_LATIN1_INIT("\xcc", "\xc3\x8c"), \ - _PyUnicode_LATIN1_INIT("\xcd", "\xc3\x8d"), \ - _PyUnicode_LATIN1_INIT("\xce", "\xc3\x8e"), \ - _PyUnicode_LATIN1_INIT("\xcf", "\xc3\x8f"), \ - _PyUnicode_LATIN1_INIT("\xd0", "\xc3\x90"), \ - _PyUnicode_LATIN1_INIT("\xd1", "\xc3\x91"), \ - _PyUnicode_LATIN1_INIT("\xd2", "\xc3\x92"), \ - _PyUnicode_LATIN1_INIT("\xd3", "\xc3\x93"), \ - _PyUnicode_LATIN1_INIT("\xd4", "\xc3\x94"), \ - _PyUnicode_LATIN1_INIT("\xd5", "\xc3\x95"), \ - _PyUnicode_LATIN1_INIT("\xd6", "\xc3\x96"), \ - _PyUnicode_LATIN1_INIT("\xd7", "\xc3\x97"), \ - _PyUnicode_LATIN1_INIT("\xd8", "\xc3\x98"), \ - _PyUnicode_LATIN1_INIT("\xd9", "\xc3\x99"), \ - _PyUnicode_LATIN1_INIT("\xda", "\xc3\x9a"), \ - _PyUnicode_LATIN1_INIT("\xdb", "\xc3\x9b"), \ - _PyUnicode_LATIN1_INIT("\xdc", "\xc3\x9c"), \ - _PyUnicode_LATIN1_INIT("\xdd", "\xc3\x9d"), \ - _PyUnicode_LATIN1_INIT("\xde", "\xc3\x9e"), \ - _PyUnicode_LATIN1_INIT("\xdf", "\xc3\x9f"), \ - _PyUnicode_LATIN1_INIT("\xe0", "\xc3\xa0"), \ - _PyUnicode_LATIN1_INIT("\xe1", "\xc3\xa1"), \ - 
_PyUnicode_LATIN1_INIT("\xe2", "\xc3\xa2"), \ - _PyUnicode_LATIN1_INIT("\xe3", "\xc3\xa3"), \ - _PyUnicode_LATIN1_INIT("\xe4", "\xc3\xa4"), \ - _PyUnicode_LATIN1_INIT("\xe5", "\xc3\xa5"), \ - _PyUnicode_LATIN1_INIT("\xe6", "\xc3\xa6"), \ - _PyUnicode_LATIN1_INIT("\xe7", "\xc3\xa7"), \ - _PyUnicode_LATIN1_INIT("\xe8", "\xc3\xa8"), \ - _PyUnicode_LATIN1_INIT("\xe9", "\xc3\xa9"), \ - _PyUnicode_LATIN1_INIT("\xea", "\xc3\xaa"), \ - _PyUnicode_LATIN1_INIT("\xeb", "\xc3\xab"), \ - _PyUnicode_LATIN1_INIT("\xec", "\xc3\xac"), \ - _PyUnicode_LATIN1_INIT("\xed", "\xc3\xad"), \ - _PyUnicode_LATIN1_INIT("\xee", "\xc3\xae"), \ - _PyUnicode_LATIN1_INIT("\xef", "\xc3\xaf"), \ - _PyUnicode_LATIN1_INIT("\xf0", "\xc3\xb0"), \ - _PyUnicode_LATIN1_INIT("\xf1", "\xc3\xb1"), \ - _PyUnicode_LATIN1_INIT("\xf2", "\xc3\xb2"), \ - _PyUnicode_LATIN1_INIT("\xf3", "\xc3\xb3"), \ - _PyUnicode_LATIN1_INIT("\xf4", "\xc3\xb4"), \ - _PyUnicode_LATIN1_INIT("\xf5", "\xc3\xb5"), \ - _PyUnicode_LATIN1_INIT("\xf6", "\xc3\xb6"), \ - _PyUnicode_LATIN1_INIT("\xf7", "\xc3\xb7"), \ - _PyUnicode_LATIN1_INIT("\xf8", "\xc3\xb8"), \ - _PyUnicode_LATIN1_INIT("\xf9", "\xc3\xb9"), \ - _PyUnicode_LATIN1_INIT("\xfa", "\xc3\xba"), \ - _PyUnicode_LATIN1_INIT("\xfb", "\xc3\xbb"), \ - _PyUnicode_LATIN1_INIT("\xfc", "\xc3\xbc"), \ - _PyUnicode_LATIN1_INIT("\xfd", "\xc3\xbd"), \ - _PyUnicode_LATIN1_INIT("\xfe", "\xc3\xbe"), \ - _PyUnicode_LATIN1_INIT("\xff", "\xc3\xbf"), \ - }, \ - }, \ - \ - .tuple_empty = { \ - .ob_base = _PyVarObject_IMMORTAL_INIT(&PyTuple_Type, 0) \ - }, \ - }, \ +#define _Py_small_ints_INIT { \ + _PyLong_DIGIT_INIT(-5), \ + _PyLong_DIGIT_INIT(-4), \ + _PyLong_DIGIT_INIT(-3), \ + _PyLong_DIGIT_INIT(-2), \ + _PyLong_DIGIT_INIT(-1), \ + _PyLong_DIGIT_INIT(0), \ + _PyLong_DIGIT_INIT(1), \ + _PyLong_DIGIT_INIT(2), \ + _PyLong_DIGIT_INIT(3), \ + _PyLong_DIGIT_INIT(4), \ + _PyLong_DIGIT_INIT(5), \ + _PyLong_DIGIT_INIT(6), \ + _PyLong_DIGIT_INIT(7), \ + _PyLong_DIGIT_INIT(8), \ + _PyLong_DIGIT_INIT(9), \ + _PyLong_DIGIT_INIT(10), \ + _PyLong_DIGIT_INIT(11), \ + _PyLong_DIGIT_INIT(12), \ + _PyLong_DIGIT_INIT(13), \ + _PyLong_DIGIT_INIT(14), \ + _PyLong_DIGIT_INIT(15), \ + _PyLong_DIGIT_INIT(16), \ + _PyLong_DIGIT_INIT(17), \ + _PyLong_DIGIT_INIT(18), \ + _PyLong_DIGIT_INIT(19), \ + _PyLong_DIGIT_INIT(20), \ + _PyLong_DIGIT_INIT(21), \ + _PyLong_DIGIT_INIT(22), \ + _PyLong_DIGIT_INIT(23), \ + _PyLong_DIGIT_INIT(24), \ + _PyLong_DIGIT_INIT(25), \ + _PyLong_DIGIT_INIT(26), \ + _PyLong_DIGIT_INIT(27), \ + _PyLong_DIGIT_INIT(28), \ + _PyLong_DIGIT_INIT(29), \ + _PyLong_DIGIT_INIT(30), \ + _PyLong_DIGIT_INIT(31), \ + _PyLong_DIGIT_INIT(32), \ + _PyLong_DIGIT_INIT(33), \ + _PyLong_DIGIT_INIT(34), \ + _PyLong_DIGIT_INIT(35), \ + _PyLong_DIGIT_INIT(36), \ + _PyLong_DIGIT_INIT(37), \ + _PyLong_DIGIT_INIT(38), \ + _PyLong_DIGIT_INIT(39), \ + _PyLong_DIGIT_INIT(40), \ + _PyLong_DIGIT_INIT(41), \ + _PyLong_DIGIT_INIT(42), \ + _PyLong_DIGIT_INIT(43), \ + _PyLong_DIGIT_INIT(44), \ + _PyLong_DIGIT_INIT(45), \ + _PyLong_DIGIT_INIT(46), \ + _PyLong_DIGIT_INIT(47), \ + _PyLong_DIGIT_INIT(48), \ + _PyLong_DIGIT_INIT(49), \ + _PyLong_DIGIT_INIT(50), \ + _PyLong_DIGIT_INIT(51), \ + _PyLong_DIGIT_INIT(52), \ + _PyLong_DIGIT_INIT(53), \ + _PyLong_DIGIT_INIT(54), \ + _PyLong_DIGIT_INIT(55), \ + _PyLong_DIGIT_INIT(56), \ + _PyLong_DIGIT_INIT(57), \ + _PyLong_DIGIT_INIT(58), \ + _PyLong_DIGIT_INIT(59), \ + _PyLong_DIGIT_INIT(60), \ + _PyLong_DIGIT_INIT(61), \ + _PyLong_DIGIT_INIT(62), \ + _PyLong_DIGIT_INIT(63), \ + _PyLong_DIGIT_INIT(64), \ + 
_PyLong_DIGIT_INIT(65), \ + _PyLong_DIGIT_INIT(66), \ + _PyLong_DIGIT_INIT(67), \ + _PyLong_DIGIT_INIT(68), \ + _PyLong_DIGIT_INIT(69), \ + _PyLong_DIGIT_INIT(70), \ + _PyLong_DIGIT_INIT(71), \ + _PyLong_DIGIT_INIT(72), \ + _PyLong_DIGIT_INIT(73), \ + _PyLong_DIGIT_INIT(74), \ + _PyLong_DIGIT_INIT(75), \ + _PyLong_DIGIT_INIT(76), \ + _PyLong_DIGIT_INIT(77), \ + _PyLong_DIGIT_INIT(78), \ + _PyLong_DIGIT_INIT(79), \ + _PyLong_DIGIT_INIT(80), \ + _PyLong_DIGIT_INIT(81), \ + _PyLong_DIGIT_INIT(82), \ + _PyLong_DIGIT_INIT(83), \ + _PyLong_DIGIT_INIT(84), \ + _PyLong_DIGIT_INIT(85), \ + _PyLong_DIGIT_INIT(86), \ + _PyLong_DIGIT_INIT(87), \ + _PyLong_DIGIT_INIT(88), \ + _PyLong_DIGIT_INIT(89), \ + _PyLong_DIGIT_INIT(90), \ + _PyLong_DIGIT_INIT(91), \ + _PyLong_DIGIT_INIT(92), \ + _PyLong_DIGIT_INIT(93), \ + _PyLong_DIGIT_INIT(94), \ + _PyLong_DIGIT_INIT(95), \ + _PyLong_DIGIT_INIT(96), \ + _PyLong_DIGIT_INIT(97), \ + _PyLong_DIGIT_INIT(98), \ + _PyLong_DIGIT_INIT(99), \ + _PyLong_DIGIT_INIT(100), \ + _PyLong_DIGIT_INIT(101), \ + _PyLong_DIGIT_INIT(102), \ + _PyLong_DIGIT_INIT(103), \ + _PyLong_DIGIT_INIT(104), \ + _PyLong_DIGIT_INIT(105), \ + _PyLong_DIGIT_INIT(106), \ + _PyLong_DIGIT_INIT(107), \ + _PyLong_DIGIT_INIT(108), \ + _PyLong_DIGIT_INIT(109), \ + _PyLong_DIGIT_INIT(110), \ + _PyLong_DIGIT_INIT(111), \ + _PyLong_DIGIT_INIT(112), \ + _PyLong_DIGIT_INIT(113), \ + _PyLong_DIGIT_INIT(114), \ + _PyLong_DIGIT_INIT(115), \ + _PyLong_DIGIT_INIT(116), \ + _PyLong_DIGIT_INIT(117), \ + _PyLong_DIGIT_INIT(118), \ + _PyLong_DIGIT_INIT(119), \ + _PyLong_DIGIT_INIT(120), \ + _PyLong_DIGIT_INIT(121), \ + _PyLong_DIGIT_INIT(122), \ + _PyLong_DIGIT_INIT(123), \ + _PyLong_DIGIT_INIT(124), \ + _PyLong_DIGIT_INIT(125), \ + _PyLong_DIGIT_INIT(126), \ + _PyLong_DIGIT_INIT(127), \ + _PyLong_DIGIT_INIT(128), \ + _PyLong_DIGIT_INIT(129), \ + _PyLong_DIGIT_INIT(130), \ + _PyLong_DIGIT_INIT(131), \ + _PyLong_DIGIT_INIT(132), \ + _PyLong_DIGIT_INIT(133), \ + _PyLong_DIGIT_INIT(134), \ + _PyLong_DIGIT_INIT(135), \ + _PyLong_DIGIT_INIT(136), \ + _PyLong_DIGIT_INIT(137), \ + _PyLong_DIGIT_INIT(138), \ + _PyLong_DIGIT_INIT(139), \ + _PyLong_DIGIT_INIT(140), \ + _PyLong_DIGIT_INIT(141), \ + _PyLong_DIGIT_INIT(142), \ + _PyLong_DIGIT_INIT(143), \ + _PyLong_DIGIT_INIT(144), \ + _PyLong_DIGIT_INIT(145), \ + _PyLong_DIGIT_INIT(146), \ + _PyLong_DIGIT_INIT(147), \ + _PyLong_DIGIT_INIT(148), \ + _PyLong_DIGIT_INIT(149), \ + _PyLong_DIGIT_INIT(150), \ + _PyLong_DIGIT_INIT(151), \ + _PyLong_DIGIT_INIT(152), \ + _PyLong_DIGIT_INIT(153), \ + _PyLong_DIGIT_INIT(154), \ + _PyLong_DIGIT_INIT(155), \ + _PyLong_DIGIT_INIT(156), \ + _PyLong_DIGIT_INIT(157), \ + _PyLong_DIGIT_INIT(158), \ + _PyLong_DIGIT_INIT(159), \ + _PyLong_DIGIT_INIT(160), \ + _PyLong_DIGIT_INIT(161), \ + _PyLong_DIGIT_INIT(162), \ + _PyLong_DIGIT_INIT(163), \ + _PyLong_DIGIT_INIT(164), \ + _PyLong_DIGIT_INIT(165), \ + _PyLong_DIGIT_INIT(166), \ + _PyLong_DIGIT_INIT(167), \ + _PyLong_DIGIT_INIT(168), \ + _PyLong_DIGIT_INIT(169), \ + _PyLong_DIGIT_INIT(170), \ + _PyLong_DIGIT_INIT(171), \ + _PyLong_DIGIT_INIT(172), \ + _PyLong_DIGIT_INIT(173), \ + _PyLong_DIGIT_INIT(174), \ + _PyLong_DIGIT_INIT(175), \ + _PyLong_DIGIT_INIT(176), \ + _PyLong_DIGIT_INIT(177), \ + _PyLong_DIGIT_INIT(178), \ + _PyLong_DIGIT_INIT(179), \ + _PyLong_DIGIT_INIT(180), \ + _PyLong_DIGIT_INIT(181), \ + _PyLong_DIGIT_INIT(182), \ + _PyLong_DIGIT_INIT(183), \ + _PyLong_DIGIT_INIT(184), \ + _PyLong_DIGIT_INIT(185), \ + _PyLong_DIGIT_INIT(186), \ + _PyLong_DIGIT_INIT(187), \ + 
_PyLong_DIGIT_INIT(188), \ + _PyLong_DIGIT_INIT(189), \ + _PyLong_DIGIT_INIT(190), \ + _PyLong_DIGIT_INIT(191), \ + _PyLong_DIGIT_INIT(192), \ + _PyLong_DIGIT_INIT(193), \ + _PyLong_DIGIT_INIT(194), \ + _PyLong_DIGIT_INIT(195), \ + _PyLong_DIGIT_INIT(196), \ + _PyLong_DIGIT_INIT(197), \ + _PyLong_DIGIT_INIT(198), \ + _PyLong_DIGIT_INIT(199), \ + _PyLong_DIGIT_INIT(200), \ + _PyLong_DIGIT_INIT(201), \ + _PyLong_DIGIT_INIT(202), \ + _PyLong_DIGIT_INIT(203), \ + _PyLong_DIGIT_INIT(204), \ + _PyLong_DIGIT_INIT(205), \ + _PyLong_DIGIT_INIT(206), \ + _PyLong_DIGIT_INIT(207), \ + _PyLong_DIGIT_INIT(208), \ + _PyLong_DIGIT_INIT(209), \ + _PyLong_DIGIT_INIT(210), \ + _PyLong_DIGIT_INIT(211), \ + _PyLong_DIGIT_INIT(212), \ + _PyLong_DIGIT_INIT(213), \ + _PyLong_DIGIT_INIT(214), \ + _PyLong_DIGIT_INIT(215), \ + _PyLong_DIGIT_INIT(216), \ + _PyLong_DIGIT_INIT(217), \ + _PyLong_DIGIT_INIT(218), \ + _PyLong_DIGIT_INIT(219), \ + _PyLong_DIGIT_INIT(220), \ + _PyLong_DIGIT_INIT(221), \ + _PyLong_DIGIT_INIT(222), \ + _PyLong_DIGIT_INIT(223), \ + _PyLong_DIGIT_INIT(224), \ + _PyLong_DIGIT_INIT(225), \ + _PyLong_DIGIT_INIT(226), \ + _PyLong_DIGIT_INIT(227), \ + _PyLong_DIGIT_INIT(228), \ + _PyLong_DIGIT_INIT(229), \ + _PyLong_DIGIT_INIT(230), \ + _PyLong_DIGIT_INIT(231), \ + _PyLong_DIGIT_INIT(232), \ + _PyLong_DIGIT_INIT(233), \ + _PyLong_DIGIT_INIT(234), \ + _PyLong_DIGIT_INIT(235), \ + _PyLong_DIGIT_INIT(236), \ + _PyLong_DIGIT_INIT(237), \ + _PyLong_DIGIT_INIT(238), \ + _PyLong_DIGIT_INIT(239), \ + _PyLong_DIGIT_INIT(240), \ + _PyLong_DIGIT_INIT(241), \ + _PyLong_DIGIT_INIT(242), \ + _PyLong_DIGIT_INIT(243), \ + _PyLong_DIGIT_INIT(244), \ + _PyLong_DIGIT_INIT(245), \ + _PyLong_DIGIT_INIT(246), \ + _PyLong_DIGIT_INIT(247), \ + _PyLong_DIGIT_INIT(248), \ + _PyLong_DIGIT_INIT(249), \ + _PyLong_DIGIT_INIT(250), \ + _PyLong_DIGIT_INIT(251), \ + _PyLong_DIGIT_INIT(252), \ + _PyLong_DIGIT_INIT(253), \ + _PyLong_DIGIT_INIT(254), \ + _PyLong_DIGIT_INIT(255), \ + _PyLong_DIGIT_INIT(256), \ } -static inline void -_PyUnicode_InitStaticStrings(void) { - PyObject *string; - string = &_Py_ID(False); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(Py_Repr); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(TextIOWrapper); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(True); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(WarningMessage); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(_); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__IOBase_closed); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__abc_tpflags__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__abs__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__abstractmethods__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__add__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__aenter__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__aexit__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__aiter__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__all__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__and__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__anext__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__annotations__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__args__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__await__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__bases__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__bool__); - 
PyUnicode_InternInPlace(&string); - string = &_Py_ID(__build_class__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__builtins__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__bytes__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__call__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__cantrace__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__class__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__class_getitem__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__classcell__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__complex__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__contains__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__copy__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__del__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__delattr__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__delete__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__delitem__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__dict__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__dictoffset__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__dir__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__divmod__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__doc__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__enter__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__eq__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__exit__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__file__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__float__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__floordiv__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__format__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__fspath__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__ge__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__get__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__getattr__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__getattribute__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__getinitargs__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__getitem__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__getnewargs__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__getnewargs_ex__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__getstate__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__gt__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__hash__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__iadd__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__iand__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__ifloordiv__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__ilshift__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__imatmul__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__imod__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__import__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__imul__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__index__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__init__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__init_subclass__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__instancecheck__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__int__); 
- PyUnicode_InternInPlace(&string); - string = &_Py_ID(__invert__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__ior__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__ipow__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__irshift__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__isabstractmethod__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__isub__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__iter__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__itruediv__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__ixor__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__le__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__len__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__length_hint__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__lltrace__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__loader__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__lshift__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__lt__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__main__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__matmul__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__missing__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__mod__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__module__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__mro_entries__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__mul__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__name__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__ne__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__neg__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__new__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__newobj__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__newobj_ex__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__next__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__notes__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__or__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__orig_class__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__origin__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__package__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__parameters__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__path__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__pos__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__pow__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__prepare__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__qualname__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__radd__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__rand__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__rdivmod__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__reduce__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__reduce_ex__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__repr__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__reversed__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__rfloordiv__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__rlshift__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__rmatmul__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__rmod__); - 
PyUnicode_InternInPlace(&string); - string = &_Py_ID(__rmul__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__ror__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__round__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__rpow__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__rrshift__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__rshift__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__rsub__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__rtruediv__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__rxor__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__set__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__set_name__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__setattr__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__setitem__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__setstate__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__sizeof__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__slotnames__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__slots__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__spec__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__str__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__sub__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__subclasscheck__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__subclasshook__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__truediv__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__trunc__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__typing_is_unpacked_typevartuple__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__typing_prepare_subst__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__typing_subst__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__typing_unpacked_tuple_args__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__warningregistry__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__weaklistoffset__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__weakref__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__xor__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(_abc_impl); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(_annotation); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(_blksize); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(_bootstrap); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(_dealloc_warn); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(_feature_version); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(_finalizing); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(_find_and_load); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(_fix_up_module); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(_get_sourcefile); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(_handle_fromlist); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(_initializing); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(_is_text_encoding); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(_lock_unlock_module); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(_showwarnmsg); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(_shutdown); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(_slotnames); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(_uninitialized_submodules); - 
PyUnicode_InternInPlace(&string); - string = &_Py_ID(_warn_unawaited_coroutine); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(_xoptions); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(a); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(abs_tol); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(access); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(add); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(after_in_child); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(after_in_parent); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(aggregate_class); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(append); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(argdefs); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(arguments); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(argv); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(attribute); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(authorizer_callback); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(b); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(backtick); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(base); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(before); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(big); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(binary_form); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(block); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(buffer); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(buffer_callback); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(buffer_size); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(buffering); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(buffers); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(bufsize); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(builtins); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(byteorder); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(bytes); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(bytes_per_sep); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(c_call); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(c_exception); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(c_return); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(cached_statements); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(cadata); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(cafile); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(call); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(capath); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(category); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(cb_type); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(certfile); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(check_same_thread); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(clear); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(close); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(closed); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(closefd); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(closure); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(co_argcount); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(co_cellvars); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(co_code); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(co_consts); - 
PyUnicode_InternInPlace(&string); - string = &_Py_ID(co_exceptiontable); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(co_filename); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(co_firstlineno); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(co_flags); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(co_freevars); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(co_kwonlyargcount); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(co_linetable); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(co_name); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(co_names); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(co_nlocals); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(co_posonlyargcount); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(co_qualname); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(co_stacksize); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(co_varnames); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(code); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(command); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(comment_factory); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(consts); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(context); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(cookie); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(copy); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(copyreg); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(coro); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(count); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(cwd); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(data); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(database); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(decode); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(decoder); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(default); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(defaultaction); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(delete); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(depth); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(detect_types); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(deterministic); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(device); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(dict); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(dictcomp); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(difference_update); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(digest); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(digest_size); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(digestmod); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(dir_fd); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(dispatch_table); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(displayhook); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(dklen); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(doc); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(dont_inherit); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(dst); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(dst_dir_fd); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(duration); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(effective_ids); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(element_factory); - 
PyUnicode_InternInPlace(&string); - string = &_Py_ID(encode); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(encoding); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(end); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(end_lineno); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(end_offset); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(endpos); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(env); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(errors); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(event); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(eventmask); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(exc_type); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(exc_value); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(excepthook); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(exception); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(exp); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(extend); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(facility); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(factory); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(family); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(fanout); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(fd); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(fd2); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(fdel); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(fget); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(file); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(file_actions); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(filename); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(fileno); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(filepath); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(fillvalue); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(filters); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(final); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(find_class); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(fix_imports); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(flags); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(flush); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(follow_symlinks); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(format); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(frequency); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(fromlist); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(fset); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(func); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(generation); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(genexpr); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(get); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(get_source); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(getattr); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(getstate); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(gid); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(globals); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(groupindex); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(groups); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(handle); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(hash_name); - PyUnicode_InternInPlace(&string); - string = 
&_Py_ID(header); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(headers); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(hi); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(hook); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(id); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(ident); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(ignore); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(imag); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(importlib); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(in_fd); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(incoming); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(indexgroup); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(inf); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(inheritable); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(initial); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(initial_bytes); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(initial_value); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(initval); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(inner_size); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(input); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(insert_comments); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(insert_pis); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(instructions); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(intern); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(intersection); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(isatty); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(isinstance); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(isolation_level); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(istext); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(item); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(items); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(iter); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(iterable); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(iterations); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(join); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(jump); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(keepends); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(key); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(keyfile); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(keys); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(kind); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(lambda); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(last); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(last_node); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(last_traceback); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(last_type); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(last_value); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(latin1); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(leaf_size); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(len); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(length); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(level); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(limit); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(line); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(line_buffering); - 
PyUnicode_InternInPlace(&string); - string = &_Py_ID(lineno); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(listcomp); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(little); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(lo); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(locale); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(locals); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(logoption); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(loop); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(mapping); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(match); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(max_length); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(maxdigits); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(maxevents); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(maxmem); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(maxsplit); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(maxvalue); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(memLevel); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(memlimit); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(message); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(metaclass); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(method); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(mod); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(mode); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(module); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(module_globals); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(modules); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(mro); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(msg); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(mycmp); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(n); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(n_arg); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(n_fields); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(n_sequence_fields); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(n_unnamed_fields); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(name); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(name_from); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(namespace_separator); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(namespaces); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(narg); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(ndigits); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(new_limit); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(newline); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(newlines); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(next); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(node_depth); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(node_offset); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(ns); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(nstype); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(number); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(obj); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(object); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(offset); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(offset_dst); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(offset_src); - 
PyUnicode_InternInPlace(&string); - string = &_Py_ID(on_type_read); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(onceregistry); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(only_keys); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(oparg); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(opcode); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(open); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(opener); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(operation); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(optimize); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(options); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(order); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(out_fd); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(outgoing); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(overlapped); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(owner); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(p); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(pages); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(parent); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(password); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(path); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(pattern); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(peek); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(persistent_id); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(persistent_load); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(person); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(pi_factory); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(pid); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(policy); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(pos); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(print_file_and_line); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(priority); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(progress); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(progress_handler); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(proto); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(protocol); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(ps1); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(ps2); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(query); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(quotetabs); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(r); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(raw); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(read); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(read1); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(readable); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(readall); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(readinto); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(readinto1); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(readline); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(readonly); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(real); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(reducer_override); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(registry); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(rel_tol); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(reload); - PyUnicode_InternInPlace(&string); 
- string = &_Py_ID(repl); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(replace); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(reserved); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(reset); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(resetids); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(return); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(reverse); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(reversed); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(s); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(salt); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(sched_priority); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(scheduler); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(seek); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(seekable); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(selectors); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(send); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(sep); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(sequence); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(server_hostname); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(server_side); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(session); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(setcomp); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(setpgroup); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(setsid); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(setsigdef); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(setsigmask); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(setstate); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(shape); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(show_cmd); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(signed); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(size); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(sizehint); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(sleep); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(sock); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(sort); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(sound); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(source); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(src); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(src_dir_fd); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(stacklevel); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(start); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(statement); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(status); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(stderr); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(stdin); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(stdout); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(step); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(store_name); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(strategy); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(strict); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(strict_mode); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(string); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(sub_key); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(symmetric_difference_update); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(tabsize); 
- PyUnicode_InternInPlace(&string); - string = &_Py_ID(tag); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(target); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(target_is_directory); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(task); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(tb_frame); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(tb_lasti); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(tb_lineno); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(tb_next); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(tell); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(template); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(term); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(text); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(threading); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(throw); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(timeout); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(times); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(top); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(trace_callback); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(traceback); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(trailers); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(translate); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(truncate); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(twice); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(txt); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(type); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(tz); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(uid); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(unlink); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(unraisablehook); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(uri); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(usedforsecurity); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(value); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(values); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(version); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(warnings); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(warnoptions); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(wbits); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(week); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(weekday); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(which); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(who); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(withdata); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(writable); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(write); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(write_through); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(x); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(year); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(zdict); - PyUnicode_InternInPlace(&string); +#define _Py_bytes_characters_INIT { \ + _PyBytes_CHAR_INIT(0), \ + _PyBytes_CHAR_INIT(1), \ + _PyBytes_CHAR_INIT(2), \ + _PyBytes_CHAR_INIT(3), \ + _PyBytes_CHAR_INIT(4), \ + _PyBytes_CHAR_INIT(5), \ + _PyBytes_CHAR_INIT(6), \ + _PyBytes_CHAR_INIT(7), \ + _PyBytes_CHAR_INIT(8), \ + _PyBytes_CHAR_INIT(9), \ + _PyBytes_CHAR_INIT(10), \ + _PyBytes_CHAR_INIT(11), \ + _PyBytes_CHAR_INIT(12), \ + 
_PyBytes_CHAR_INIT(13), \ + _PyBytes_CHAR_INIT(14), \ + _PyBytes_CHAR_INIT(15), \ + _PyBytes_CHAR_INIT(16), \ + _PyBytes_CHAR_INIT(17), \ + _PyBytes_CHAR_INIT(18), \ + _PyBytes_CHAR_INIT(19), \ + _PyBytes_CHAR_INIT(20), \ + _PyBytes_CHAR_INIT(21), \ + _PyBytes_CHAR_INIT(22), \ + _PyBytes_CHAR_INIT(23), \ + _PyBytes_CHAR_INIT(24), \ + _PyBytes_CHAR_INIT(25), \ + _PyBytes_CHAR_INIT(26), \ + _PyBytes_CHAR_INIT(27), \ + _PyBytes_CHAR_INIT(28), \ + _PyBytes_CHAR_INIT(29), \ + _PyBytes_CHAR_INIT(30), \ + _PyBytes_CHAR_INIT(31), \ + _PyBytes_CHAR_INIT(32), \ + _PyBytes_CHAR_INIT(33), \ + _PyBytes_CHAR_INIT(34), \ + _PyBytes_CHAR_INIT(35), \ + _PyBytes_CHAR_INIT(36), \ + _PyBytes_CHAR_INIT(37), \ + _PyBytes_CHAR_INIT(38), \ + _PyBytes_CHAR_INIT(39), \ + _PyBytes_CHAR_INIT(40), \ + _PyBytes_CHAR_INIT(41), \ + _PyBytes_CHAR_INIT(42), \ + _PyBytes_CHAR_INIT(43), \ + _PyBytes_CHAR_INIT(44), \ + _PyBytes_CHAR_INIT(45), \ + _PyBytes_CHAR_INIT(46), \ + _PyBytes_CHAR_INIT(47), \ + _PyBytes_CHAR_INIT(48), \ + _PyBytes_CHAR_INIT(49), \ + _PyBytes_CHAR_INIT(50), \ + _PyBytes_CHAR_INIT(51), \ + _PyBytes_CHAR_INIT(52), \ + _PyBytes_CHAR_INIT(53), \ + _PyBytes_CHAR_INIT(54), \ + _PyBytes_CHAR_INIT(55), \ + _PyBytes_CHAR_INIT(56), \ + _PyBytes_CHAR_INIT(57), \ + _PyBytes_CHAR_INIT(58), \ + _PyBytes_CHAR_INIT(59), \ + _PyBytes_CHAR_INIT(60), \ + _PyBytes_CHAR_INIT(61), \ + _PyBytes_CHAR_INIT(62), \ + _PyBytes_CHAR_INIT(63), \ + _PyBytes_CHAR_INIT(64), \ + _PyBytes_CHAR_INIT(65), \ + _PyBytes_CHAR_INIT(66), \ + _PyBytes_CHAR_INIT(67), \ + _PyBytes_CHAR_INIT(68), \ + _PyBytes_CHAR_INIT(69), \ + _PyBytes_CHAR_INIT(70), \ + _PyBytes_CHAR_INIT(71), \ + _PyBytes_CHAR_INIT(72), \ + _PyBytes_CHAR_INIT(73), \ + _PyBytes_CHAR_INIT(74), \ + _PyBytes_CHAR_INIT(75), \ + _PyBytes_CHAR_INIT(76), \ + _PyBytes_CHAR_INIT(77), \ + _PyBytes_CHAR_INIT(78), \ + _PyBytes_CHAR_INIT(79), \ + _PyBytes_CHAR_INIT(80), \ + _PyBytes_CHAR_INIT(81), \ + _PyBytes_CHAR_INIT(82), \ + _PyBytes_CHAR_INIT(83), \ + _PyBytes_CHAR_INIT(84), \ + _PyBytes_CHAR_INIT(85), \ + _PyBytes_CHAR_INIT(86), \ + _PyBytes_CHAR_INIT(87), \ + _PyBytes_CHAR_INIT(88), \ + _PyBytes_CHAR_INIT(89), \ + _PyBytes_CHAR_INIT(90), \ + _PyBytes_CHAR_INIT(91), \ + _PyBytes_CHAR_INIT(92), \ + _PyBytes_CHAR_INIT(93), \ + _PyBytes_CHAR_INIT(94), \ + _PyBytes_CHAR_INIT(95), \ + _PyBytes_CHAR_INIT(96), \ + _PyBytes_CHAR_INIT(97), \ + _PyBytes_CHAR_INIT(98), \ + _PyBytes_CHAR_INIT(99), \ + _PyBytes_CHAR_INIT(100), \ + _PyBytes_CHAR_INIT(101), \ + _PyBytes_CHAR_INIT(102), \ + _PyBytes_CHAR_INIT(103), \ + _PyBytes_CHAR_INIT(104), \ + _PyBytes_CHAR_INIT(105), \ + _PyBytes_CHAR_INIT(106), \ + _PyBytes_CHAR_INIT(107), \ + _PyBytes_CHAR_INIT(108), \ + _PyBytes_CHAR_INIT(109), \ + _PyBytes_CHAR_INIT(110), \ + _PyBytes_CHAR_INIT(111), \ + _PyBytes_CHAR_INIT(112), \ + _PyBytes_CHAR_INIT(113), \ + _PyBytes_CHAR_INIT(114), \ + _PyBytes_CHAR_INIT(115), \ + _PyBytes_CHAR_INIT(116), \ + _PyBytes_CHAR_INIT(117), \ + _PyBytes_CHAR_INIT(118), \ + _PyBytes_CHAR_INIT(119), \ + _PyBytes_CHAR_INIT(120), \ + _PyBytes_CHAR_INIT(121), \ + _PyBytes_CHAR_INIT(122), \ + _PyBytes_CHAR_INIT(123), \ + _PyBytes_CHAR_INIT(124), \ + _PyBytes_CHAR_INIT(125), \ + _PyBytes_CHAR_INIT(126), \ + _PyBytes_CHAR_INIT(127), \ + _PyBytes_CHAR_INIT(128), \ + _PyBytes_CHAR_INIT(129), \ + _PyBytes_CHAR_INIT(130), \ + _PyBytes_CHAR_INIT(131), \ + _PyBytes_CHAR_INIT(132), \ + _PyBytes_CHAR_INIT(133), \ + _PyBytes_CHAR_INIT(134), \ + _PyBytes_CHAR_INIT(135), \ + _PyBytes_CHAR_INIT(136), \ + _PyBytes_CHAR_INIT(137), \ + 
_PyBytes_CHAR_INIT(138), \ + _PyBytes_CHAR_INIT(139), \ + _PyBytes_CHAR_INIT(140), \ + _PyBytes_CHAR_INIT(141), \ + _PyBytes_CHAR_INIT(142), \ + _PyBytes_CHAR_INIT(143), \ + _PyBytes_CHAR_INIT(144), \ + _PyBytes_CHAR_INIT(145), \ + _PyBytes_CHAR_INIT(146), \ + _PyBytes_CHAR_INIT(147), \ + _PyBytes_CHAR_INIT(148), \ + _PyBytes_CHAR_INIT(149), \ + _PyBytes_CHAR_INIT(150), \ + _PyBytes_CHAR_INIT(151), \ + _PyBytes_CHAR_INIT(152), \ + _PyBytes_CHAR_INIT(153), \ + _PyBytes_CHAR_INIT(154), \ + _PyBytes_CHAR_INIT(155), \ + _PyBytes_CHAR_INIT(156), \ + _PyBytes_CHAR_INIT(157), \ + _PyBytes_CHAR_INIT(158), \ + _PyBytes_CHAR_INIT(159), \ + _PyBytes_CHAR_INIT(160), \ + _PyBytes_CHAR_INIT(161), \ + _PyBytes_CHAR_INIT(162), \ + _PyBytes_CHAR_INIT(163), \ + _PyBytes_CHAR_INIT(164), \ + _PyBytes_CHAR_INIT(165), \ + _PyBytes_CHAR_INIT(166), \ + _PyBytes_CHAR_INIT(167), \ + _PyBytes_CHAR_INIT(168), \ + _PyBytes_CHAR_INIT(169), \ + _PyBytes_CHAR_INIT(170), \ + _PyBytes_CHAR_INIT(171), \ + _PyBytes_CHAR_INIT(172), \ + _PyBytes_CHAR_INIT(173), \ + _PyBytes_CHAR_INIT(174), \ + _PyBytes_CHAR_INIT(175), \ + _PyBytes_CHAR_INIT(176), \ + _PyBytes_CHAR_INIT(177), \ + _PyBytes_CHAR_INIT(178), \ + _PyBytes_CHAR_INIT(179), \ + _PyBytes_CHAR_INIT(180), \ + _PyBytes_CHAR_INIT(181), \ + _PyBytes_CHAR_INIT(182), \ + _PyBytes_CHAR_INIT(183), \ + _PyBytes_CHAR_INIT(184), \ + _PyBytes_CHAR_INIT(185), \ + _PyBytes_CHAR_INIT(186), \ + _PyBytes_CHAR_INIT(187), \ + _PyBytes_CHAR_INIT(188), \ + _PyBytes_CHAR_INIT(189), \ + _PyBytes_CHAR_INIT(190), \ + _PyBytes_CHAR_INIT(191), \ + _PyBytes_CHAR_INIT(192), \ + _PyBytes_CHAR_INIT(193), \ + _PyBytes_CHAR_INIT(194), \ + _PyBytes_CHAR_INIT(195), \ + _PyBytes_CHAR_INIT(196), \ + _PyBytes_CHAR_INIT(197), \ + _PyBytes_CHAR_INIT(198), \ + _PyBytes_CHAR_INIT(199), \ + _PyBytes_CHAR_INIT(200), \ + _PyBytes_CHAR_INIT(201), \ + _PyBytes_CHAR_INIT(202), \ + _PyBytes_CHAR_INIT(203), \ + _PyBytes_CHAR_INIT(204), \ + _PyBytes_CHAR_INIT(205), \ + _PyBytes_CHAR_INIT(206), \ + _PyBytes_CHAR_INIT(207), \ + _PyBytes_CHAR_INIT(208), \ + _PyBytes_CHAR_INIT(209), \ + _PyBytes_CHAR_INIT(210), \ + _PyBytes_CHAR_INIT(211), \ + _PyBytes_CHAR_INIT(212), \ + _PyBytes_CHAR_INIT(213), \ + _PyBytes_CHAR_INIT(214), \ + _PyBytes_CHAR_INIT(215), \ + _PyBytes_CHAR_INIT(216), \ + _PyBytes_CHAR_INIT(217), \ + _PyBytes_CHAR_INIT(218), \ + _PyBytes_CHAR_INIT(219), \ + _PyBytes_CHAR_INIT(220), \ + _PyBytes_CHAR_INIT(221), \ + _PyBytes_CHAR_INIT(222), \ + _PyBytes_CHAR_INIT(223), \ + _PyBytes_CHAR_INIT(224), \ + _PyBytes_CHAR_INIT(225), \ + _PyBytes_CHAR_INIT(226), \ + _PyBytes_CHAR_INIT(227), \ + _PyBytes_CHAR_INIT(228), \ + _PyBytes_CHAR_INIT(229), \ + _PyBytes_CHAR_INIT(230), \ + _PyBytes_CHAR_INIT(231), \ + _PyBytes_CHAR_INIT(232), \ + _PyBytes_CHAR_INIT(233), \ + _PyBytes_CHAR_INIT(234), \ + _PyBytes_CHAR_INIT(235), \ + _PyBytes_CHAR_INIT(236), \ + _PyBytes_CHAR_INIT(237), \ + _PyBytes_CHAR_INIT(238), \ + _PyBytes_CHAR_INIT(239), \ + _PyBytes_CHAR_INIT(240), \ + _PyBytes_CHAR_INIT(241), \ + _PyBytes_CHAR_INIT(242), \ + _PyBytes_CHAR_INIT(243), \ + _PyBytes_CHAR_INIT(244), \ + _PyBytes_CHAR_INIT(245), \ + _PyBytes_CHAR_INIT(246), \ + _PyBytes_CHAR_INIT(247), \ + _PyBytes_CHAR_INIT(248), \ + _PyBytes_CHAR_INIT(249), \ + _PyBytes_CHAR_INIT(250), \ + _PyBytes_CHAR_INIT(251), \ + _PyBytes_CHAR_INIT(252), \ + _PyBytes_CHAR_INIT(253), \ + _PyBytes_CHAR_INIT(254), \ + _PyBytes_CHAR_INIT(255), \ } -#ifdef Py_DEBUG -static inline void -_PyStaticObjects_CheckRefcnt(void) { - if (Py_REFCNT((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -5]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -5]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -4]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -4]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -3]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -3]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -2]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -2]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -1]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -1]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 0]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 0]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 1]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 1]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 2]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 2]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 3]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 3]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 4]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 4]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 5]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 5]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 6]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 6]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 7]) < _PyObject_IMMORTAL_REFCNT) { - 
_PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 7]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 8]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 8]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 9]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 9]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 10]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 10]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 11]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 11]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 12]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 12]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 13]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 13]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 14]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 14]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 15]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 15]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 16]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 16]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 17]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 17]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 18]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 18]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 19]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 19]); 
- Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 20]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 20]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 21]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 21]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 22]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 22]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 23]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 23]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 24]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 24]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 25]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 25]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 26]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 26]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 27]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 27]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 28]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 28]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 29]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 29]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 30]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 30]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 31]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 31]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 32]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 32]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 33]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 33]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 34]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 34]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 35]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 35]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 36]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 36]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 37]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 37]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 38]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 38]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 39]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 39]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 40]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 40]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 41]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 41]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 42]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 42]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 43]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 43]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 44]) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 44]);
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
    [The same four-line removed check repeats verbatim, with only the index changing, for &_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 45] through &_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 256].]
-    if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_empty)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_empty));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
    [The same removed check then repeats for the single-byte bytes singletons, &_Py_SINGLETON(bytes_characters)[0] through &_Py_SINGLETON(bytes_characters)[45].]
-    if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[46]) < _PyObject_IMMORTAL_REFCNT) {
_PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[46]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[47]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[47]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[48]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[48]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[49]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[49]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[50]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[50]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[51]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[51]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[52]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[52]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[53]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[53]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[54]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[54]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[55]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[55]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[56]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[56]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[57]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[57]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[58]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[58]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[59]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[59]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[60]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[60]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[61]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[61]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[62]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[62]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[63]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[63]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[64]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[64]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[65]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[65]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[66]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[66]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[67]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[67]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[68]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[68]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[69]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[69]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[70]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[70]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[71]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[71]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[72]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[72]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[73]) < _PyObject_IMMORTAL_REFCNT) { - 
_PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[73]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[74]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[74]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[75]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[75]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[76]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[76]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[77]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[77]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[78]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[78]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[79]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[79]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[80]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[80]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[81]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[81]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[82]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[82]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[83]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[83]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[84]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[84]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[85]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[85]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[86]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[86]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[87]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[87]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[88]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[88]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[89]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[89]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[90]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[90]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[91]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[91]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[92]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[92]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[93]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[93]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[94]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[94]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[95]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[95]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[96]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[96]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[97]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[97]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[98]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[98]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[99]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[99]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[100]) < _PyObject_IMMORTAL_REFCNT) { - 
_PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[100]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[101]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[101]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[102]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[102]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[103]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[103]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[104]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[104]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[105]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[105]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[106]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[106]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[107]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[107]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[108]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[108]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[109]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[109]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[110]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[110]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[111]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[111]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[112]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[112]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[113]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[113]); - Py_FatalError("immortal object has less refcnt than 
expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[114]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[114]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[115]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[115]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[116]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[116]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[117]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[117]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[118]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[118]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[119]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[119]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[120]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[120]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[121]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[121]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[122]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[122]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[123]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[123]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[124]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[124]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[125]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[125]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[126]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[126]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[127]) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[127]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[129]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[129]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[130]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[130]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[131]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[131]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[132]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[132]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[133]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[133]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[134]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[134]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[135]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[135]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[136]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[136]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[137]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[137]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[138]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[138]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[139]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[139]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[140]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[140]); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[141]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[141]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[142]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[142]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[143]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[143]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[144]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[144]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[145]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[145]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[146]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[146]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[147]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[147]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[148]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[148]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[149]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[149]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[150]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[150]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[151]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[151]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[152]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[152]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[153]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[153]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject 
*)&_Py_SINGLETON(bytes_characters)[154]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[154]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[155]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[155]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[156]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[156]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[157]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[157]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[158]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[158]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[159]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[159]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[160]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[160]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[161]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[161]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[162]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[162]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[163]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[163]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[164]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[164]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[165]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[165]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[166]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[166]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[167]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject 
*)&_Py_SINGLETON(bytes_characters)[167]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[168]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[168]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[169]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[169]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[170]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[170]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[171]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[171]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[172]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[172]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[173]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[173]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[174]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[174]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[175]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[175]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[176]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[176]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[177]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[177]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[178]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[178]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[179]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[179]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[180]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[180]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[181]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[181]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[182]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[182]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[183]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[183]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[184]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[184]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[185]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[185]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[186]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[186]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[187]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[187]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[188]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[188]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[189]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[189]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[190]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[190]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[191]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[191]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[192]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[192]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[193]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[193]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[194]) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[194]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[195]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[195]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[196]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[196]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[197]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[197]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[198]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[198]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[199]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[199]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[200]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[200]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[201]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[201]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[202]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[202]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[203]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[203]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[204]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[204]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[205]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[205]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[206]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[206]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[207]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[207]); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[208]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[208]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[209]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[209]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[210]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[210]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[211]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[211]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[212]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[212]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[213]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[213]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[214]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[214]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[215]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[215]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[216]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[216]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[217]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[217]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[218]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[218]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[219]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[219]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[220]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[220]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject 
*)&_Py_SINGLETON(bytes_characters)[221]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[221]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[222]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[222]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[223]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[223]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[224]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[224]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[225]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[225]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[226]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[226]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[227]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[227]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[228]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[228]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[229]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[229]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[230]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[230]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[231]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[231]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[232]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[232]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[233]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[233]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[234]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject 
*)&_Py_SINGLETON(bytes_characters)[234]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[235]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[235]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[236]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[236]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[237]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[237]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[238]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[238]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[239]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[239]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[240]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[240]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[241]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[241]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[242]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[242]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[243]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[243]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[244]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[244]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[245]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[245]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[246]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[246]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[247]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[247]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[248]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[248]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[249]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[249]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[250]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[250]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[251]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[251]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[252]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[252]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[253]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[253]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[254]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[254]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[255]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[255]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(anon_dictcomp)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(anon_dictcomp)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(anon_genexpr)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(anon_genexpr)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(anon_lambda)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(anon_lambda)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(anon_listcomp)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(anon_listcomp)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(anon_module)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(anon_module)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(anon_setcomp)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(anon_setcomp)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if 
(Py_REFCNT((PyObject *)&_Py_STR(anon_string)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(anon_string)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(anon_unknown)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(anon_unknown)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(close_br)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(close_br)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(comma_sep)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(comma_sep)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(dbl_close_br)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(dbl_close_br)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(dbl_open_br)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(dbl_open_br)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(dbl_percent)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(dbl_percent)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(dot)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(dot)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(dot_locals)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(dot_locals)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(empty)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(empty)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(list_err)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(list_err)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(newline)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(newline)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(open_br)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(open_br)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(percent)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(percent)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(utf_8)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(utf_8)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(False)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(False)); - Py_FatalError("immortal object has 
less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(Py_Repr)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(Py_Repr)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(TextIOWrapper)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(TextIOWrapper)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(True)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(True)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(WarningMessage)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(WarningMessage)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__IOBase_closed)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__IOBase_closed)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__abc_tpflags__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__abc_tpflags__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__abs__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__abs__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__abstractmethods__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__abstractmethods__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__add__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__add__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__aenter__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__aenter__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__aexit__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__aexit__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__aiter__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__aiter__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__all__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__all__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__and__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__and__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__anext__)) < _PyObject_IMMORTAL_REFCNT) { - 
_PyObject_Dump((PyObject *)&_Py_ID(__anext__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__annotations__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__annotations__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__args__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__args__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__await__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__await__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__bases__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__bases__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__bool__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__bool__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__build_class__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__build_class__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__builtins__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__builtins__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__bytes__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__bytes__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__call__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__call__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__cantrace__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__cantrace__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__class__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__class__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__class_getitem__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__class_getitem__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__classcell__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__classcell__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__complex__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__complex__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__contains__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__contains__)); - Py_FatalError("immortal object has less refcnt 
than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__copy__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__copy__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__del__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__del__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__delattr__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__delattr__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__delete__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__delete__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__delitem__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__delitem__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__dict__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__dict__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__dictoffset__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__dictoffset__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__dir__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__dir__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__divmod__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__divmod__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__doc__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__doc__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__enter__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__enter__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__eq__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__eq__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__exit__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__exit__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__file__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__file__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__float__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__float__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__floordiv__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject 
*)&_Py_ID(__floordiv__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__format__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__format__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__fspath__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__fspath__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__ge__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__ge__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__get__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__get__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__getattr__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__getattr__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__getattribute__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__getattribute__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__getinitargs__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__getinitargs__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__getitem__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__getitem__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__getnewargs__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__getnewargs__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__getnewargs_ex__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__getnewargs_ex__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__getstate__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__getstate__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__gt__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__gt__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__hash__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__hash__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__iadd__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__iadd__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__iand__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__iand__)); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__ifloordiv__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__ifloordiv__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__ilshift__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__ilshift__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__imatmul__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__imatmul__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__imod__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__imod__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__import__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__import__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__imul__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__imul__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__index__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__index__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__init__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__init__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__init_subclass__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__init_subclass__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__instancecheck__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__instancecheck__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__int__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__int__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__invert__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__invert__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__ior__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__ior__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__ipow__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__ipow__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__irshift__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__irshift__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__isabstractmethod__)) < _PyObject_IMMORTAL_REFCNT) { 
- _PyObject_Dump((PyObject *)&_Py_ID(__isabstractmethod__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__isub__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__isub__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__iter__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__iter__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__itruediv__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__itruediv__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__ixor__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__ixor__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__le__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__le__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__len__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__len__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__length_hint__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__length_hint__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__lltrace__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__lltrace__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__loader__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__loader__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__lshift__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__lshift__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__lt__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__lt__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__main__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__main__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__matmul__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__matmul__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__missing__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__missing__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__mod__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__mod__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - 
if (Py_REFCNT((PyObject *)&_Py_ID(__module__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__module__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__mro_entries__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__mro_entries__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__mul__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__mul__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__name__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__name__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__ne__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__ne__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__neg__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__neg__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__new__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__new__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__newobj__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__newobj__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__newobj_ex__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__newobj_ex__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__next__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__next__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__notes__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__notes__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__or__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__or__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__orig_class__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__orig_class__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__origin__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__origin__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__package__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__package__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__parameters__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__parameters__)); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__path__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__path__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__pos__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__pos__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__pow__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__pow__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__prepare__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__prepare__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__qualname__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__qualname__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__radd__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__radd__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rand__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rand__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rdivmod__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rdivmod__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__reduce__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__reduce__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__reduce_ex__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__reduce_ex__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__repr__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__repr__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__reversed__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__reversed__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rfloordiv__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rfloordiv__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rlshift__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rlshift__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rmatmul__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rmatmul__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject 
*)&_Py_ID(__rmod__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rmod__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rmul__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rmul__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__ror__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__ror__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__round__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__round__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rpow__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rpow__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rrshift__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rrshift__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rshift__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rshift__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rsub__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rsub__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rtruediv__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rtruediv__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rxor__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rxor__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__set__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__set__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__set_name__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__set_name__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__setattr__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__setattr__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__setitem__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__setitem__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__setstate__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__setstate__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__sizeof__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__sizeof__)); - Py_FatalError("immortal object has less 
refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__slotnames__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__slotnames__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__slots__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__slots__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__spec__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__spec__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__str__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__str__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__sub__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__sub__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__subclasscheck__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__subclasscheck__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__subclasshook__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__subclasshook__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__truediv__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__truediv__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__trunc__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__trunc__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__typing_is_unpacked_typevartuple__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__typing_is_unpacked_typevartuple__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__typing_prepare_subst__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__typing_prepare_subst__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__typing_subst__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__typing_subst__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__typing_unpacked_tuple_args__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__typing_unpacked_tuple_args__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__warningregistry__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__warningregistry__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__weaklistoffset__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject 
*)&_Py_ID(__weaklistoffset__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__weakref__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__weakref__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__xor__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__xor__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_abc_impl)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_abc_impl)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_annotation)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_annotation)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_blksize)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_blksize)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_bootstrap)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_bootstrap)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_dealloc_warn)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_dealloc_warn)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_feature_version)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_feature_version)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_finalizing)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_finalizing)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_find_and_load)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_find_and_load)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_fix_up_module)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_fix_up_module)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_get_sourcefile)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_get_sourcefile)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_handle_fromlist)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_handle_fromlist)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_initializing)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_initializing)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_is_text_encoding)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_is_text_encoding)); - Py_FatalError("immortal object 
has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_lock_unlock_module)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_lock_unlock_module)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_showwarnmsg)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_showwarnmsg)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_shutdown)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_shutdown)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_slotnames)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_slotnames)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_uninitialized_submodules)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_uninitialized_submodules)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_warn_unawaited_coroutine)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_warn_unawaited_coroutine)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_xoptions)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_xoptions)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(a)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(a)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(abs_tol)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(abs_tol)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(access)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(access)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(add)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(add)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(after_in_child)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(after_in_child)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(after_in_parent)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(after_in_parent)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(aggregate_class)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(aggregate_class)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(append)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(append)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if 
(Py_REFCNT((PyObject *)&_Py_ID(argdefs)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(argdefs)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(arguments)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(arguments)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(argv)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(argv)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(attribute)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(attribute)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(authorizer_callback)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(authorizer_callback)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(b)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(b)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(backtick)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(backtick)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(base)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(base)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(before)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(before)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(big)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(big)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(binary_form)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(binary_form)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(block)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(block)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(buffer)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(buffer)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(buffer_callback)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(buffer_callback)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(buffer_size)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(buffer_size)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(buffering)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(buffering)); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(buffers)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(buffers)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(bufsize)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(bufsize)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(builtins)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(builtins)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(byteorder)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(byteorder)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(bytes)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(bytes)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(bytes_per_sep)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(bytes_per_sep)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(c_call)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(c_call)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(c_exception)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(c_exception)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(c_return)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(c_return)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(cached_statements)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(cached_statements)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(cadata)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(cadata)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(cafile)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(cafile)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(call)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(call)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(capath)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(capath)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(category)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(category)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(cb_type)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(cb_type)); - Py_FatalError("immortal object 
has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(certfile)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(certfile)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(check_same_thread)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(check_same_thread)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(clear)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(clear)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(close)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(close)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(closed)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(closed)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(closefd)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(closefd)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(closure)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(closure)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_argcount)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_argcount)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_cellvars)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_cellvars)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_code)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_code)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_consts)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_consts)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_exceptiontable)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_exceptiontable)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_filename)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_filename)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_firstlineno)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_firstlineno)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_flags)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_flags)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_freevars)) < _PyObject_IMMORTAL_REFCNT) { - 
_PyObject_Dump((PyObject *)&_Py_ID(co_freevars)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_kwonlyargcount)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_kwonlyargcount)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_linetable)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_linetable)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_name)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_name)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_names)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_names)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_nlocals)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_nlocals)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_posonlyargcount)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_posonlyargcount)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_qualname)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_qualname)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_stacksize)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_stacksize)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_varnames)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_varnames)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(code)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(code)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(command)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(command)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(comment_factory)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(comment_factory)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(consts)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(consts)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(context)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(context)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(cookie)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(cookie)); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(copy)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(copy)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(copyreg)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(copyreg)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(coro)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(coro)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(count)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(count)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(cwd)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(cwd)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(data)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(data)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(database)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(database)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(decode)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(decode)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(decoder)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(decoder)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(default)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(default)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(defaultaction)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(defaultaction)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(delete)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(delete)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(depth)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(depth)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(detect_types)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(detect_types)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(deterministic)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(deterministic)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(device)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(device)); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(dict)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(dict)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(dictcomp)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(dictcomp)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(difference_update)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(difference_update)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(digest)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(digest)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(digest_size)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(digest_size)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(digestmod)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(digestmod)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(dir_fd)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(dir_fd)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(dispatch_table)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(dispatch_table)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(displayhook)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(displayhook)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(dklen)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(dklen)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(doc)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(doc)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(dont_inherit)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(dont_inherit)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(dst)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(dst)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(dst_dir_fd)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(dst_dir_fd)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(duration)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(duration)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(effective_ids)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(effective_ids)); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(element_factory)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(element_factory)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(encode)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(encode)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(encoding)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(encoding)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(end)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(end)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(end_lineno)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(end_lineno)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(end_offset)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(end_offset)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(endpos)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(endpos)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(env)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(env)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(errors)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(errors)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(event)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(event)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(eventmask)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(eventmask)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(exc_type)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(exc_type)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(exc_value)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(exc_value)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(excepthook)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(excepthook)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(exception)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(exception)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(exp)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject 
*)&_Py_ID(exp)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(extend)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(extend)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(facility)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(facility)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(factory)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(factory)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(family)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(family)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(fanout)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(fanout)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(fd)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(fd)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(fd2)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(fd2)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(fdel)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(fdel)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(fget)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(fget)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(file)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(file)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(file_actions)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(file_actions)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(filename)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(filename)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(fileno)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(fileno)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(filepath)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(filepath)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(fillvalue)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(fillvalue)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(filters)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(filters)); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(final)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(final)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(find_class)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(find_class)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(fix_imports)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(fix_imports)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(flags)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(flags)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(flush)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(flush)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(follow_symlinks)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(follow_symlinks)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(format)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(format)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(frequency)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(frequency)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(fromlist)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(fromlist)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(fset)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(fset)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(func)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(func)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(generation)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(generation)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(genexpr)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(genexpr)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(get)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(get)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(get_source)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(get_source)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(getattr)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject 
*)&_Py_ID(getattr)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(getstate)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(getstate)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(gid)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(gid)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(globals)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(globals)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(groupindex)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(groupindex)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(groups)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(groups)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(handle)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(handle)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(hash_name)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(hash_name)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(header)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(header)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(headers)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(headers)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(hi)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(hi)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(hook)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(hook)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(id)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(id)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(ident)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(ident)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(ignore)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(ignore)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(imag)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(imag)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(importlib)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(importlib)); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(in_fd)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(in_fd)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(incoming)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(incoming)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(indexgroup)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(indexgroup)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(inf)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(inf)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(inheritable)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(inheritable)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(initial)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(initial)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(initial_bytes)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(initial_bytes)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(initial_value)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(initial_value)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(initval)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(initval)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(inner_size)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(inner_size)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(input)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(input)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(insert_comments)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(insert_comments)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(insert_pis)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(insert_pis)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(instructions)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(instructions)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(intern)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(intern)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(intersection)) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(intersection)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(isatty)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(isatty)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(isinstance)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(isinstance)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(isolation_level)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(isolation_level)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(istext)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(istext)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(item)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(item)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(items)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(items)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(iter)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(iter)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(iterable)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(iterable)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(iterations)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(iterations)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(join)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(join)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(jump)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(jump)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(keepends)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(keepends)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(key)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(key)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(keyfile)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(keyfile)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(keys)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(keys)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(kind)) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(kind)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(lambda)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(lambda)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(last)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(last)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(last_node)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(last_node)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(last_traceback)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(last_traceback)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(last_type)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(last_type)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(last_value)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(last_value)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(latin1)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(latin1)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(leaf_size)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(leaf_size)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(len)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(len)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(length)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(length)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(level)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(level)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(limit)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(limit)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(line)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(line)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(line_buffering)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(line_buffering)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(lineno)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(lineno)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject 
*)&_Py_ID(listcomp)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(listcomp)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(little)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(little)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(lo)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(lo)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(locale)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(locale)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(locals)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(locals)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(logoption)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(logoption)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(loop)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(loop)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(mapping)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(mapping)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(match)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(match)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(max_length)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(max_length)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(maxdigits)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(maxdigits)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(maxevents)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(maxevents)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(maxmem)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(maxmem)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(maxsplit)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(maxsplit)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(maxvalue)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(maxvalue)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(memLevel)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(memLevel)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject 
*)&_Py_ID(memlimit)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(memlimit)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(message)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(message)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(metaclass)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(metaclass)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(method)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(method)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(mod)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(mod)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(mode)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(mode)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(module)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(module)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(module_globals)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(module_globals)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(modules)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(modules)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(mro)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(mro)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(msg)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(msg)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(mycmp)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(mycmp)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(n)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(n)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(n_arg)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(n_arg)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(n_fields)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(n_fields)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(n_sequence_fields)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(n_sequence_fields)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject 
*)&_Py_ID(n_unnamed_fields)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(n_unnamed_fields)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(name)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(name)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(name_from)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(name_from)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(namespace_separator)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(namespace_separator)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(namespaces)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(namespaces)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(narg)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(narg)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(ndigits)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(ndigits)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(new_limit)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(new_limit)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(newline)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(newline)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(newlines)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(newlines)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(next)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(next)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(node_depth)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(node_depth)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(node_offset)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(node_offset)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(ns)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(ns)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(nstype)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(nstype)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(number)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(number)); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(obj)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(obj)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(object)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(object)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(offset)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(offset)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(offset_dst)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(offset_dst)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(offset_src)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(offset_src)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(on_type_read)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(on_type_read)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(onceregistry)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(onceregistry)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(only_keys)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(only_keys)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(oparg)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(oparg)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(opcode)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(opcode)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(open)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(open)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(opener)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(opener)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(operation)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(operation)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(optimize)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(optimize)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(options)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(options)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(order)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(order)); - Py_FatalError("immortal object has less refcnt than 
expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(out_fd)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(out_fd)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(outgoing)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(outgoing)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(overlapped)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(overlapped)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(owner)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(owner)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(p)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(p)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(pages)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(pages)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(parent)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(parent)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(password)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(password)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(path)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(path)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(pattern)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(pattern)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(peek)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(peek)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(persistent_id)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(persistent_id)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(persistent_load)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(persistent_load)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(person)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(person)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(pi_factory)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(pi_factory)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(pid)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(pid)); - Py_FatalError("immortal object has less refcnt than 
expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(policy)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(policy)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(pos)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(pos)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(print_file_and_line)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(print_file_and_line)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(priority)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(priority)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(progress)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(progress)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(progress_handler)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(progress_handler)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(proto)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(proto)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(protocol)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(protocol)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(ps1)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(ps1)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(ps2)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(ps2)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(query)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(query)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(quotetabs)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(quotetabs)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(r)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(r)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(raw)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(raw)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(read)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(read)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(read1)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(read1)); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(readable)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(readable)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(readall)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(readall)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(readinto)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(readinto)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(readinto1)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(readinto1)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(readline)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(readline)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(readonly)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(readonly)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(real)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(real)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(reducer_override)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(reducer_override)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(registry)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(registry)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(rel_tol)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(rel_tol)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(reload)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(reload)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(repl)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(repl)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(replace)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(replace)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(reserved)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(reserved)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(reset)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(reset)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(resetids)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(resetids)); - Py_FatalError("immortal object has less 
refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(return)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(return)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(reverse)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(reverse)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(reversed)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(reversed)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(s)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(s)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(salt)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(salt)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(sched_priority)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(sched_priority)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(scheduler)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(scheduler)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(seek)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(seek)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(seekable)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(seekable)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(selectors)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(selectors)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(send)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(send)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(sep)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(sep)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(sequence)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(sequence)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(server_hostname)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(server_hostname)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(server_side)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(server_side)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(session)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(session)); - Py_FatalError("immortal object 
has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(setcomp)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(setcomp)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(setpgroup)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(setpgroup)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(setsid)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(setsid)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(setsigdef)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(setsigdef)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(setsigmask)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(setsigmask)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(setstate)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(setstate)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(shape)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(shape)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(show_cmd)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(show_cmd)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(signed)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(signed)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(size)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(size)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(sizehint)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(sizehint)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(sleep)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(sleep)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(sock)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(sock)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(sort)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(sort)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(sound)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(sound)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(source)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(source)); - Py_FatalError("immortal object has less refcnt 
than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(src)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(src)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(src_dir_fd)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(src_dir_fd)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(stacklevel)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(stacklevel)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(start)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(start)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(statement)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(statement)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(status)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(status)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(stderr)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(stderr)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(stdin)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(stdin)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(stdout)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(stdout)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(step)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(step)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(store_name)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(store_name)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(strategy)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(strategy)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(strict)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(strict)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(strict_mode)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(strict_mode)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(string)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(string)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(sub_key)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(sub_key)); - Py_FatalError("immortal object has less refcnt 
than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(symmetric_difference_update)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(symmetric_difference_update)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(tabsize)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(tabsize)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(tag)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(tag)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(target)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(target)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(target_is_directory)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(target_is_directory)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(task)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(task)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(tb_frame)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(tb_frame)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(tb_lasti)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(tb_lasti)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(tb_lineno)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(tb_lineno)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(tb_next)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(tb_next)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(tell)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(tell)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(template)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(template)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(term)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(term)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(text)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(text)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(threading)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(threading)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(throw)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(throw)); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(timeout)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(timeout)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(times)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(times)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(top)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(top)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(trace_callback)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(trace_callback)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(traceback)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(traceback)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(trailers)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(trailers)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(translate)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(translate)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(truncate)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(truncate)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(twice)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(twice)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(txt)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(txt)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(type)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(type)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(tz)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(tz)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(uid)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(uid)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(unlink)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(unlink)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(unraisablehook)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(unraisablehook)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(uri)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(uri)); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(usedforsecurity)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(usedforsecurity)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(value)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(value)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(values)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(values)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(version)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(version)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(warnings)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(warnings)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(warnoptions)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(warnoptions)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(wbits)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(wbits)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(week)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(week)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(weekday)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(weekday)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(which)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(which)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(who)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(who)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(withdata)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(withdata)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(writable)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(writable)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(write)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(write)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(write_through)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(write_through)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(x)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(x)); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(year)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(year)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(zdict)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(zdict)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[0]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[0]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[1]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[1]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[2]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[2]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[3]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[3]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[4]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[4]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[5]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[5]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[6]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[6]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[7]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[7]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[8]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[8]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[9]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[9]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[10]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[10]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[11]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[11]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[12]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[12]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[13]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[13]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[14]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[14]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[15]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[15]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[16]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[16]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[17]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[17]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[18]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[18]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[19]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[19]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[20]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[20]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[21]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[21]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[22]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[22]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[23]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[23]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[24]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[24]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[25]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[25]); - Py_FatalError("immortal object 
has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[26]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[26]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[27]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[27]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[28]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[28]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[29]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[29]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[30]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[30]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[31]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[31]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[32]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[32]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[33]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[33]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[34]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[34]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[35]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[35]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[36]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[36]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[37]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[37]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[38]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[38]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[39]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[39]); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[40]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[40]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[41]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[41]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[42]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[42]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[43]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[43]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[44]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[44]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[45]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[45]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[46]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[46]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[47]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[47]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[48]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[48]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[49]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[49]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[50]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[50]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[51]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[51]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[52]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[52]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[53]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject 
*)&_Py_SINGLETON(strings).ascii[53]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[54]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[54]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[55]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[55]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[56]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[56]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[57]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[57]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[58]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[58]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[59]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[59]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[60]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[60]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[61]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[61]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[62]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[62]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[63]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[63]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[64]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[64]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[65]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[65]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[66]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[66]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[67]) < _PyObject_IMMORTAL_REFCNT) 
{ - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[67]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[68]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[68]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[69]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[69]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[70]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[70]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[71]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[71]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[72]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[72]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[73]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[73]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[74]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[74]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[75]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[75]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[76]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[76]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[77]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[77]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[78]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[78]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[79]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[79]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[80]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[80]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[81]) 
< _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[81]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[82]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[82]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[83]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[83]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[84]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[84]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[85]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[85]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[86]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[86]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[87]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[87]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[88]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[88]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[89]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[89]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[90]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[90]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[91]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[91]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[92]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[92]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[93]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[93]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[94]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[94]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject 
*)&_Py_SINGLETON(strings).ascii[95]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[95]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[96]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[96]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[97]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[97]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[98]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[98]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[99]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[99]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[100]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[100]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[101]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[101]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[102]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[102]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[103]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[103]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[104]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[104]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[105]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[105]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[106]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[106]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[107]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[107]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[108]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[108]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[109]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[109]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[110]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[110]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[111]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[111]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[112]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[112]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[113]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[113]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[114]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[114]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[115]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[115]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[116]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[116]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[117]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[117]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[118]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[118]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[119]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[119]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[120]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[120]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[121]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[121]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[122]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[122]); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[123]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[123]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[124]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[124]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[125]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[125]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[126]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[126]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[127]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[127]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[128 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[128 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[129 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[129 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[130 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[130 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[131 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[131 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[132 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[132 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[133 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[133 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[134 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[134 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[135 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[135 - 128]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[136 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[136 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[137 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[137 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[138 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[138 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[139 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[139 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[140 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[140 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[141 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[141 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[142 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[142 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[143 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[143 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[144 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[144 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[145 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[145 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[146 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[146 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[147 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[147 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[148 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[148 - 128]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[149 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[149 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[150 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[150 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[151 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[151 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[152 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[152 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[153 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[153 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[154 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[154 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[155 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[155 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[156 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[156 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[157 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[157 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[158 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[158 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[159 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[159 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[160 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[160 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[161 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[161 - 128]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[162 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[162 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[163 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[163 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[164 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[164 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[165 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[165 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[166 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[166 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[167 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[167 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[168 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[168 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[169 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[169 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[170 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[170 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[171 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[171 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[172 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[172 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[173 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[173 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[174 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[174 - 128]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[175 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[175 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[176 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[176 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[177 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[177 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[178 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[178 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[179 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[179 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[180 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[180 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[181 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[181 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[182 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[182 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[183 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[183 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[184 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[184 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[185 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[185 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[186 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[186 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[187 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[187 - 128]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[188 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[188 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[189 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[189 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[190 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[190 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[191 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[191 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[192 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[192 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[193 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[193 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[194 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[194 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[195 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[195 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[196 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[196 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[197 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[197 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[198 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[198 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[199 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[199 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[200 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[200 - 128]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[201 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[201 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[202 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[202 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[203 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[203 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[204 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[204 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[205 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[205 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[206 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[206 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[207 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[207 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[208 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[208 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[209 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[209 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[210 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[210 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[211 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[211 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[212 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[212 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[213 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[213 - 128]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[214 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[214 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[215 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[215 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[216 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[216 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[217 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[217 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[218 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[218 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[219 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[219 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[220 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[220 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[221 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[221 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[222 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[222 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[223 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[223 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[224 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[224 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[225 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[225 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[226 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[226 - 128]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[227 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[227 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[228 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[228 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[229 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[229 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[230 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[230 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[231 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[231 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[232 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[232 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[233 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[233 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[234 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[234 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[235 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[235 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[236 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[236 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[237 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[237 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[238 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[238 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[239 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[239 - 128]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[240 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[240 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[241 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[241 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[242 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[242 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[243 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[243 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[244 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[244 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[245 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[245 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[246 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[246 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[247 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[247 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[248 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[248 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[249 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[249 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[250 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[250 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[251 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[251 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[252 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[252 - 128]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[253 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[253 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[254 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[254 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[255 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[255 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(tuple_empty)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(tuple_empty)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; +#define _Py_str_literals_INIT { \ + INIT_STR(anon_dictcomp, "<dictcomp>"), \ + INIT_STR(anon_genexpr, "<genexpr>"), \ + INIT_STR(anon_lambda, "<lambda>"), \ + INIT_STR(anon_listcomp, "<listcomp>"), \ + INIT_STR(anon_module, "<module>"), \ + INIT_STR(anon_setcomp, "<setcomp>"), \ + INIT_STR(anon_string, "<string>"), \ + INIT_STR(anon_unknown, "<unknown>"), \ + INIT_STR(close_br, "}"), \ + INIT_STR(dbl_close_br, "}}"), \ + INIT_STR(dbl_open_br, "{{"), \ + INIT_STR(dbl_percent, "%%"), \ + INIT_STR(dot, "."), \ + INIT_STR(dot_locals, ".<locals>"), \ + INIT_STR(empty, ""), \ + INIT_STR(json_decoder, "json.decoder"), \ + INIT_STR(list_err, "list index out of range"), \ + INIT_STR(newline, "\n"), \ + INIT_STR(open_br, "{"), \ + INIT_STR(percent, "%"), \ + INIT_STR(shim_name, "<shim>"), \ + INIT_STR(utf_8, "utf-8"), \ +} + +#define _Py_str_identifiers_INIT { \ + INIT_ID(CANCELLED), \ + INIT_ID(FINISHED), \ + INIT_ID(False), \ + INIT_ID(JSONDecodeError), \ + INIT_ID(PENDING), \ + INIT_ID(Py_Repr), \ + INIT_ID(TextIOWrapper), \ + INIT_ID(True), \ + INIT_ID(WarningMessage), \ + INIT_ID(_), \ + INIT_ID(__IOBase_closed), \ + INIT_ID(__abc_tpflags__), \ + INIT_ID(__abs__), \ + INIT_ID(__abstractmethods__), \ + INIT_ID(__add__), \ + INIT_ID(__aenter__), \ + INIT_ID(__aexit__), \ + INIT_ID(__aiter__), \ + INIT_ID(__all__), \ + INIT_ID(__and__), \ + INIT_ID(__anext__), \ + INIT_ID(__annotations__), \ + INIT_ID(__args__), \ + INIT_ID(__asyncio_running_event_loop__), \ + INIT_ID(__await__), \ + INIT_ID(__bases__), \ + INIT_ID(__bool__), \ + INIT_ID(__build_class__), \ + INIT_ID(__builtins__), \ + INIT_ID(__bytes__), \ + INIT_ID(__call__), \ + INIT_ID(__cantrace__), \ + INIT_ID(__class__), \ + INIT_ID(__class_getitem__), \ + INIT_ID(__classcell__), \ + INIT_ID(__complex__), \ + INIT_ID(__contains__), \ + INIT_ID(__copy__), \ + INIT_ID(__ctypes_from_outparam__), \ + INIT_ID(__del__), \ + INIT_ID(__delattr__), \ + INIT_ID(__delete__), \ + INIT_ID(__delitem__), \ + INIT_ID(__dict__), \ + INIT_ID(__dictoffset__), \ + INIT_ID(__dir__), \ + INIT_ID(__divmod__), \ + INIT_ID(__doc__), \ + INIT_ID(__enter__), \ + INIT_ID(__eq__), \ + INIT_ID(__exit__), \ + INIT_ID(__file__), \ + INIT_ID(__float__), \ + INIT_ID(__floordiv__), \ + INIT_ID(__format__), \ + INIT_ID(__fspath__), \ + INIT_ID(__ge__), \ + INIT_ID(__get__), \ + INIT_ID(__getattr__), \ + INIT_ID(__getattribute__), \ + INIT_ID(__getinitargs__), \ + INIT_ID(__getitem__), \ + 
INIT_ID(__getnewargs__), \ + INIT_ID(__getnewargs_ex__), \ + INIT_ID(__getstate__), \ + INIT_ID(__gt__), \ + INIT_ID(__hash__), \ + INIT_ID(__iadd__), \ + INIT_ID(__iand__), \ + INIT_ID(__ifloordiv__), \ + INIT_ID(__ilshift__), \ + INIT_ID(__imatmul__), \ + INIT_ID(__imod__), \ + INIT_ID(__import__), \ + INIT_ID(__imul__), \ + INIT_ID(__index__), \ + INIT_ID(__init__), \ + INIT_ID(__init_subclass__), \ + INIT_ID(__instancecheck__), \ + INIT_ID(__int__), \ + INIT_ID(__invert__), \ + INIT_ID(__ior__), \ + INIT_ID(__ipow__), \ + INIT_ID(__irshift__), \ + INIT_ID(__isabstractmethod__), \ + INIT_ID(__isub__), \ + INIT_ID(__iter__), \ + INIT_ID(__itruediv__), \ + INIT_ID(__ixor__), \ + INIT_ID(__le__), \ + INIT_ID(__len__), \ + INIT_ID(__length_hint__), \ + INIT_ID(__lltrace__), \ + INIT_ID(__loader__), \ + INIT_ID(__lshift__), \ + INIT_ID(__lt__), \ + INIT_ID(__main__), \ + INIT_ID(__matmul__), \ + INIT_ID(__missing__), \ + INIT_ID(__mod__), \ + INIT_ID(__module__), \ + INIT_ID(__mro_entries__), \ + INIT_ID(__mul__), \ + INIT_ID(__name__), \ + INIT_ID(__ne__), \ + INIT_ID(__neg__), \ + INIT_ID(__new__), \ + INIT_ID(__newobj__), \ + INIT_ID(__newobj_ex__), \ + INIT_ID(__next__), \ + INIT_ID(__notes__), \ + INIT_ID(__or__), \ + INIT_ID(__orig_class__), \ + INIT_ID(__origin__), \ + INIT_ID(__package__), \ + INIT_ID(__parameters__), \ + INIT_ID(__path__), \ + INIT_ID(__pos__), \ + INIT_ID(__pow__), \ + INIT_ID(__prepare__), \ + INIT_ID(__qualname__), \ + INIT_ID(__radd__), \ + INIT_ID(__rand__), \ + INIT_ID(__rdivmod__), \ + INIT_ID(__reduce__), \ + INIT_ID(__reduce_ex__), \ + INIT_ID(__repr__), \ + INIT_ID(__reversed__), \ + INIT_ID(__rfloordiv__), \ + INIT_ID(__rlshift__), \ + INIT_ID(__rmatmul__), \ + INIT_ID(__rmod__), \ + INIT_ID(__rmul__), \ + INIT_ID(__ror__), \ + INIT_ID(__round__), \ + INIT_ID(__rpow__), \ + INIT_ID(__rrshift__), \ + INIT_ID(__rshift__), \ + INIT_ID(__rsub__), \ + INIT_ID(__rtruediv__), \ + INIT_ID(__rxor__), \ + INIT_ID(__set__), \ + INIT_ID(__set_name__), \ + INIT_ID(__setattr__), \ + INIT_ID(__setitem__), \ + INIT_ID(__setstate__), \ + INIT_ID(__sizeof__), \ + INIT_ID(__slotnames__), \ + INIT_ID(__slots__), \ + INIT_ID(__spec__), \ + INIT_ID(__str__), \ + INIT_ID(__sub__), \ + INIT_ID(__subclasscheck__), \ + INIT_ID(__subclasshook__), \ + INIT_ID(__truediv__), \ + INIT_ID(__trunc__), \ + INIT_ID(__typing_is_unpacked_typevartuple__), \ + INIT_ID(__typing_prepare_subst__), \ + INIT_ID(__typing_subst__), \ + INIT_ID(__typing_unpacked_tuple_args__), \ + INIT_ID(__warningregistry__), \ + INIT_ID(__weaklistoffset__), \ + INIT_ID(__weakref__), \ + INIT_ID(__xor__), \ + INIT_ID(_abc_impl), \ + INIT_ID(_abstract_), \ + INIT_ID(_annotation), \ + INIT_ID(_anonymous_), \ + INIT_ID(_argtypes_), \ + INIT_ID(_as_parameter_), \ + INIT_ID(_asyncio_future_blocking), \ + INIT_ID(_blksize), \ + INIT_ID(_bootstrap), \ + INIT_ID(_check_retval_), \ + INIT_ID(_dealloc_warn), \ + INIT_ID(_feature_version), \ + INIT_ID(_fields_), \ + INIT_ID(_finalizing), \ + INIT_ID(_find_and_load), \ + INIT_ID(_fix_up_module), \ + INIT_ID(_flags_), \ + INIT_ID(_get_sourcefile), \ + INIT_ID(_handle_fromlist), \ + INIT_ID(_initializing), \ + INIT_ID(_is_text_encoding), \ + INIT_ID(_length_), \ + INIT_ID(_lock_unlock_module), \ + INIT_ID(_loop), \ + INIT_ID(_needs_com_addref_), \ + INIT_ID(_pack_), \ + INIT_ID(_restype_), \ + INIT_ID(_showwarnmsg), \ + INIT_ID(_shutdown), \ + INIT_ID(_slotnames), \ + INIT_ID(_strptime_datetime), \ + INIT_ID(_swappedbytes_), \ + INIT_ID(_type_), \ + 
INIT_ID(_uninitialized_submodules), \ + INIT_ID(_warn_unawaited_coroutine), \ + INIT_ID(_xoptions), \ + INIT_ID(a), \ + INIT_ID(abs_tol), \ + INIT_ID(access), \ + INIT_ID(add), \ + INIT_ID(add_done_callback), \ + INIT_ID(after_in_child), \ + INIT_ID(after_in_parent), \ + INIT_ID(aggregate_class), \ + INIT_ID(append), \ + INIT_ID(argdefs), \ + INIT_ID(arguments), \ + INIT_ID(argv), \ + INIT_ID(as_integer_ratio), \ + INIT_ID(ast), \ + INIT_ID(attribute), \ + INIT_ID(authorizer_callback), \ + INIT_ID(autocommit), \ + INIT_ID(b), \ + INIT_ID(backtick), \ + INIT_ID(base), \ + INIT_ID(before), \ + INIT_ID(big), \ + INIT_ID(binary_form), \ + INIT_ID(block), \ + INIT_ID(buffer), \ + INIT_ID(buffer_callback), \ + INIT_ID(buffer_size), \ + INIT_ID(buffering), \ + INIT_ID(buffers), \ + INIT_ID(bufsize), \ + INIT_ID(builtins), \ + INIT_ID(byteorder), \ + INIT_ID(bytes), \ + INIT_ID(bytes_per_sep), \ + INIT_ID(c), \ + INIT_ID(c_call), \ + INIT_ID(c_exception), \ + INIT_ID(c_return), \ + INIT_ID(cached_statements), \ + INIT_ID(cadata), \ + INIT_ID(cafile), \ + INIT_ID(call), \ + INIT_ID(call_exception_handler), \ + INIT_ID(call_soon), \ + INIT_ID(cancel), \ + INIT_ID(capath), \ + INIT_ID(category), \ + INIT_ID(cb_type), \ + INIT_ID(certfile), \ + INIT_ID(check_same_thread), \ + INIT_ID(clear), \ + INIT_ID(close), \ + INIT_ID(closed), \ + INIT_ID(closefd), \ + INIT_ID(closure), \ + INIT_ID(co_argcount), \ + INIT_ID(co_cellvars), \ + INIT_ID(co_code), \ + INIT_ID(co_consts), \ + INIT_ID(co_exceptiontable), \ + INIT_ID(co_filename), \ + INIT_ID(co_firstlineno), \ + INIT_ID(co_flags), \ + INIT_ID(co_freevars), \ + INIT_ID(co_kwonlyargcount), \ + INIT_ID(co_linetable), \ + INIT_ID(co_name), \ + INIT_ID(co_names), \ + INIT_ID(co_nlocals), \ + INIT_ID(co_posonlyargcount), \ + INIT_ID(co_qualname), \ + INIT_ID(co_stacksize), \ + INIT_ID(co_varnames), \ + INIT_ID(code), \ + INIT_ID(command), \ + INIT_ID(comment_factory), \ + INIT_ID(consts), \ + INIT_ID(context), \ + INIT_ID(cookie), \ + INIT_ID(copy), \ + INIT_ID(copyreg), \ + INIT_ID(coro), \ + INIT_ID(count), \ + INIT_ID(cwd), \ + INIT_ID(d), \ + INIT_ID(data), \ + INIT_ID(database), \ + INIT_ID(decode), \ + INIT_ID(decoder), \ + INIT_ID(default), \ + INIT_ID(defaultaction), \ + INIT_ID(delete), \ + INIT_ID(depth), \ + INIT_ID(detect_types), \ + INIT_ID(deterministic), \ + INIT_ID(device), \ + INIT_ID(dict), \ + INIT_ID(dictcomp), \ + INIT_ID(difference_update), \ + INIT_ID(digest), \ + INIT_ID(digest_size), \ + INIT_ID(digestmod), \ + INIT_ID(dir_fd), \ + INIT_ID(discard), \ + INIT_ID(dispatch_table), \ + INIT_ID(displayhook), \ + INIT_ID(dklen), \ + INIT_ID(doc), \ + INIT_ID(dont_inherit), \ + INIT_ID(dst), \ + INIT_ID(dst_dir_fd), \ + INIT_ID(duration), \ + INIT_ID(e), \ + INIT_ID(effective_ids), \ + INIT_ID(element_factory), \ + INIT_ID(encode), \ + INIT_ID(encoding), \ + INIT_ID(end), \ + INIT_ID(end_lineno), \ + INIT_ID(end_offset), \ + INIT_ID(endpos), \ + INIT_ID(env), \ + INIT_ID(errors), \ + INIT_ID(event), \ + INIT_ID(eventmask), \ + INIT_ID(exc_type), \ + INIT_ID(exc_value), \ + INIT_ID(excepthook), \ + INIT_ID(exception), \ + INIT_ID(exp), \ + INIT_ID(extend), \ + INIT_ID(facility), \ + INIT_ID(factory), \ + INIT_ID(false), \ + INIT_ID(family), \ + INIT_ID(fanout), \ + INIT_ID(fd), \ + INIT_ID(fd2), \ + INIT_ID(fdel), \ + INIT_ID(fget), \ + INIT_ID(file), \ + INIT_ID(file_actions), \ + INIT_ID(filename), \ + INIT_ID(fileno), \ + INIT_ID(filepath), \ + INIT_ID(fillvalue), \ + INIT_ID(filters), \ + INIT_ID(final), \ + INIT_ID(find_class), \ + 
INIT_ID(fix_imports), \ + INIT_ID(flags), \ + INIT_ID(flush), \ + INIT_ID(follow_symlinks), \ + INIT_ID(format), \ + INIT_ID(frequency), \ + INIT_ID(from_param), \ + INIT_ID(fromlist), \ + INIT_ID(fromtimestamp), \ + INIT_ID(fromutc), \ + INIT_ID(fset), \ + INIT_ID(func), \ + INIT_ID(future), \ + INIT_ID(generation), \ + INIT_ID(genexpr), \ + INIT_ID(get), \ + INIT_ID(get_debug), \ + INIT_ID(get_event_loop), \ + INIT_ID(get_loop), \ + INIT_ID(get_source), \ + INIT_ID(getattr), \ + INIT_ID(getstate), \ + INIT_ID(gid), \ + INIT_ID(globals), \ + INIT_ID(groupindex), \ + INIT_ID(groups), \ + INIT_ID(handle), \ + INIT_ID(hash_name), \ + INIT_ID(header), \ + INIT_ID(headers), \ + INIT_ID(hi), \ + INIT_ID(hook), \ + INIT_ID(id), \ + INIT_ID(ident), \ + INIT_ID(ignore), \ + INIT_ID(imag), \ + INIT_ID(importlib), \ + INIT_ID(in_fd), \ + INIT_ID(incoming), \ + INIT_ID(indexgroup), \ + INIT_ID(inf), \ + INIT_ID(inheritable), \ + INIT_ID(initial), \ + INIT_ID(initial_bytes), \ + INIT_ID(initial_value), \ + INIT_ID(initval), \ + INIT_ID(inner_size), \ + INIT_ID(input), \ + INIT_ID(insert_comments), \ + INIT_ID(insert_pis), \ + INIT_ID(instructions), \ + INIT_ID(intern), \ + INIT_ID(intersection), \ + INIT_ID(isatty), \ + INIT_ID(isinstance), \ + INIT_ID(isoformat), \ + INIT_ID(isolation_level), \ + INIT_ID(istext), \ + INIT_ID(item), \ + INIT_ID(items), \ + INIT_ID(iter), \ + INIT_ID(iterable), \ + INIT_ID(iterations), \ + INIT_ID(join), \ + INIT_ID(jump), \ + INIT_ID(keepends), \ + INIT_ID(key), \ + INIT_ID(keyfile), \ + INIT_ID(keys), \ + INIT_ID(kind), \ + INIT_ID(kw), \ + INIT_ID(kw1), \ + INIT_ID(kw2), \ + INIT_ID(lambda), \ + INIT_ID(last), \ + INIT_ID(last_node), \ + INIT_ID(last_traceback), \ + INIT_ID(last_type), \ + INIT_ID(last_value), \ + INIT_ID(latin1), \ + INIT_ID(leaf_size), \ + INIT_ID(len), \ + INIT_ID(length), \ + INIT_ID(level), \ + INIT_ID(limit), \ + INIT_ID(line), \ + INIT_ID(line_buffering), \ + INIT_ID(lineno), \ + INIT_ID(listcomp), \ + INIT_ID(little), \ + INIT_ID(lo), \ + INIT_ID(locale), \ + INIT_ID(locals), \ + INIT_ID(logoption), \ + INIT_ID(loop), \ + INIT_ID(mapping), \ + INIT_ID(match), \ + INIT_ID(max_length), \ + INIT_ID(maxdigits), \ + INIT_ID(maxevents), \ + INIT_ID(maxmem), \ + INIT_ID(maxsplit), \ + INIT_ID(maxvalue), \ + INIT_ID(memLevel), \ + INIT_ID(memlimit), \ + INIT_ID(message), \ + INIT_ID(metaclass), \ + INIT_ID(method), \ + INIT_ID(mod), \ + INIT_ID(mode), \ + INIT_ID(module), \ + INIT_ID(module_globals), \ + INIT_ID(modules), \ + INIT_ID(mro), \ + INIT_ID(msg), \ + INIT_ID(mycmp), \ + INIT_ID(n), \ + INIT_ID(n_arg), \ + INIT_ID(n_fields), \ + INIT_ID(n_sequence_fields), \ + INIT_ID(n_unnamed_fields), \ + INIT_ID(name), \ + INIT_ID(name_from), \ + INIT_ID(namespace_separator), \ + INIT_ID(namespaces), \ + INIT_ID(narg), \ + INIT_ID(ndigits), \ + INIT_ID(new_limit), \ + INIT_ID(newline), \ + INIT_ID(newlines), \ + INIT_ID(next), \ + INIT_ID(node_depth), \ + INIT_ID(node_offset), \ + INIT_ID(ns), \ + INIT_ID(nstype), \ + INIT_ID(nt), \ + INIT_ID(null), \ + INIT_ID(number), \ + INIT_ID(obj), \ + INIT_ID(object), \ + INIT_ID(offset), \ + INIT_ID(offset_dst), \ + INIT_ID(offset_src), \ + INIT_ID(on_type_read), \ + INIT_ID(onceregistry), \ + INIT_ID(only_keys), \ + INIT_ID(oparg), \ + INIT_ID(opcode), \ + INIT_ID(open), \ + INIT_ID(opener), \ + INIT_ID(operation), \ + INIT_ID(optimize), \ + INIT_ID(options), \ + INIT_ID(order), \ + INIT_ID(out_fd), \ + INIT_ID(outgoing), \ + INIT_ID(overlapped), \ + INIT_ID(owner), \ + INIT_ID(p), \ + INIT_ID(pages), \ + 
INIT_ID(parent), \ + INIT_ID(password), \ + INIT_ID(path), \ + INIT_ID(pattern), \ + INIT_ID(peek), \ + INIT_ID(persistent_id), \ + INIT_ID(persistent_load), \ + INIT_ID(person), \ + INIT_ID(pi_factory), \ + INIT_ID(pid), \ + INIT_ID(policy), \ + INIT_ID(pos), \ + INIT_ID(pos1), \ + INIT_ID(pos2), \ + INIT_ID(posix), \ + INIT_ID(print_file_and_line), \ + INIT_ID(priority), \ + INIT_ID(progress), \ + INIT_ID(progress_handler), \ + INIT_ID(proto), \ + INIT_ID(protocol), \ + INIT_ID(ps1), \ + INIT_ID(ps2), \ + INIT_ID(query), \ + INIT_ID(quotetabs), \ + INIT_ID(r), \ + INIT_ID(raw), \ + INIT_ID(read), \ + INIT_ID(read1), \ + INIT_ID(readable), \ + INIT_ID(readall), \ + INIT_ID(readinto), \ + INIT_ID(readinto1), \ + INIT_ID(readline), \ + INIT_ID(readonly), \ + INIT_ID(real), \ + INIT_ID(reducer_override), \ + INIT_ID(registry), \ + INIT_ID(rel_tol), \ + INIT_ID(reload), \ + INIT_ID(repl), \ + INIT_ID(replace), \ + INIT_ID(reserved), \ + INIT_ID(reset), \ + INIT_ID(resetids), \ + INIT_ID(return), \ + INIT_ID(reverse), \ + INIT_ID(reversed), \ + INIT_ID(s), \ + INIT_ID(salt), \ + INIT_ID(sched_priority), \ + INIT_ID(scheduler), \ + INIT_ID(seek), \ + INIT_ID(seekable), \ + INIT_ID(selectors), \ + INIT_ID(self), \ + INIT_ID(send), \ + INIT_ID(sep), \ + INIT_ID(sequence), \ + INIT_ID(server_hostname), \ + INIT_ID(server_side), \ + INIT_ID(session), \ + INIT_ID(setcomp), \ + INIT_ID(setpgroup), \ + INIT_ID(setsid), \ + INIT_ID(setsigdef), \ + INIT_ID(setsigmask), \ + INIT_ID(setstate), \ + INIT_ID(shape), \ + INIT_ID(show_cmd), \ + INIT_ID(signed), \ + INIT_ID(size), \ + INIT_ID(sizehint), \ + INIT_ID(sleep), \ + INIT_ID(sock), \ + INIT_ID(sort), \ + INIT_ID(sound), \ + INIT_ID(source), \ + INIT_ID(source_traceback), \ + INIT_ID(src), \ + INIT_ID(src_dir_fd), \ + INIT_ID(stacklevel), \ + INIT_ID(start), \ + INIT_ID(statement), \ + INIT_ID(status), \ + INIT_ID(stderr), \ + INIT_ID(stdin), \ + INIT_ID(stdout), \ + INIT_ID(step), \ + INIT_ID(store_name), \ + INIT_ID(strategy), \ + INIT_ID(strftime), \ + INIT_ID(strict), \ + INIT_ID(strict_mode), \ + INIT_ID(string), \ + INIT_ID(sub_key), \ + INIT_ID(symmetric_difference_update), \ + INIT_ID(tabsize), \ + INIT_ID(tag), \ + INIT_ID(target), \ + INIT_ID(target_is_directory), \ + INIT_ID(task), \ + INIT_ID(tb_frame), \ + INIT_ID(tb_lasti), \ + INIT_ID(tb_lineno), \ + INIT_ID(tb_next), \ + INIT_ID(tell), \ + INIT_ID(template), \ + INIT_ID(term), \ + INIT_ID(text), \ + INIT_ID(threading), \ + INIT_ID(throw), \ + INIT_ID(timeout), \ + INIT_ID(times), \ + INIT_ID(timetuple), \ + INIT_ID(top), \ + INIT_ID(trace_callback), \ + INIT_ID(traceback), \ + INIT_ID(trailers), \ + INIT_ID(translate), \ + INIT_ID(true), \ + INIT_ID(truncate), \ + INIT_ID(twice), \ + INIT_ID(txt), \ + INIT_ID(type), \ + INIT_ID(tz), \ + INIT_ID(tzname), \ + INIT_ID(uid), \ + INIT_ID(unlink), \ + INIT_ID(unraisablehook), \ + INIT_ID(uri), \ + INIT_ID(usedforsecurity), \ + INIT_ID(value), \ + INIT_ID(values), \ + INIT_ID(version), \ + INIT_ID(warnings), \ + INIT_ID(warnoptions), \ + INIT_ID(wbits), \ + INIT_ID(week), \ + INIT_ID(weekday), \ + INIT_ID(which), \ + INIT_ID(who), \ + INIT_ID(withdata), \ + INIT_ID(writable), \ + INIT_ID(write), \ + INIT_ID(write_through), \ + INIT_ID(x), \ + INIT_ID(year), \ + INIT_ID(zdict), \ +} + +#define _Py_str_ascii_INIT { \ + _PyASCIIObject_INIT("\x00"), \ + _PyASCIIObject_INIT("\x01"), \ + _PyASCIIObject_INIT("\x02"), \ + _PyASCIIObject_INIT("\x03"), \ + _PyASCIIObject_INIT("\x04"), \ + _PyASCIIObject_INIT("\x05"), \ + _PyASCIIObject_INIT("\x06"), \ + 
_PyASCIIObject_INIT("\x07"), \ + _PyASCIIObject_INIT("\x08"), \ + _PyASCIIObject_INIT("\x09"), \ + _PyASCIIObject_INIT("\x0a"), \ + _PyASCIIObject_INIT("\x0b"), \ + _PyASCIIObject_INIT("\x0c"), \ + _PyASCIIObject_INIT("\x0d"), \ + _PyASCIIObject_INIT("\x0e"), \ + _PyASCIIObject_INIT("\x0f"), \ + _PyASCIIObject_INIT("\x10"), \ + _PyASCIIObject_INIT("\x11"), \ + _PyASCIIObject_INIT("\x12"), \ + _PyASCIIObject_INIT("\x13"), \ + _PyASCIIObject_INIT("\x14"), \ + _PyASCIIObject_INIT("\x15"), \ + _PyASCIIObject_INIT("\x16"), \ + _PyASCIIObject_INIT("\x17"), \ + _PyASCIIObject_INIT("\x18"), \ + _PyASCIIObject_INIT("\x19"), \ + _PyASCIIObject_INIT("\x1a"), \ + _PyASCIIObject_INIT("\x1b"), \ + _PyASCIIObject_INIT("\x1c"), \ + _PyASCIIObject_INIT("\x1d"), \ + _PyASCIIObject_INIT("\x1e"), \ + _PyASCIIObject_INIT("\x1f"), \ + _PyASCIIObject_INIT("\x20"), \ + _PyASCIIObject_INIT("\x21"), \ + _PyASCIIObject_INIT("\x22"), \ + _PyASCIIObject_INIT("\x23"), \ + _PyASCIIObject_INIT("\x24"), \ + _PyASCIIObject_INIT("\x25"), \ + _PyASCIIObject_INIT("\x26"), \ + _PyASCIIObject_INIT("\x27"), \ + _PyASCIIObject_INIT("\x28"), \ + _PyASCIIObject_INIT("\x29"), \ + _PyASCIIObject_INIT("\x2a"), \ + _PyASCIIObject_INIT("\x2b"), \ + _PyASCIIObject_INIT("\x2c"), \ + _PyASCIIObject_INIT("\x2d"), \ + _PyASCIIObject_INIT("\x2e"), \ + _PyASCIIObject_INIT("\x2f"), \ + _PyASCIIObject_INIT("\x30"), \ + _PyASCIIObject_INIT("\x31"), \ + _PyASCIIObject_INIT("\x32"), \ + _PyASCIIObject_INIT("\x33"), \ + _PyASCIIObject_INIT("\x34"), \ + _PyASCIIObject_INIT("\x35"), \ + _PyASCIIObject_INIT("\x36"), \ + _PyASCIIObject_INIT("\x37"), \ + _PyASCIIObject_INIT("\x38"), \ + _PyASCIIObject_INIT("\x39"), \ + _PyASCIIObject_INIT("\x3a"), \ + _PyASCIIObject_INIT("\x3b"), \ + _PyASCIIObject_INIT("\x3c"), \ + _PyASCIIObject_INIT("\x3d"), \ + _PyASCIIObject_INIT("\x3e"), \ + _PyASCIIObject_INIT("\x3f"), \ + _PyASCIIObject_INIT("\x40"), \ + _PyASCIIObject_INIT("\x41"), \ + _PyASCIIObject_INIT("\x42"), \ + _PyASCIIObject_INIT("\x43"), \ + _PyASCIIObject_INIT("\x44"), \ + _PyASCIIObject_INIT("\x45"), \ + _PyASCIIObject_INIT("\x46"), \ + _PyASCIIObject_INIT("\x47"), \ + _PyASCIIObject_INIT("\x48"), \ + _PyASCIIObject_INIT("\x49"), \ + _PyASCIIObject_INIT("\x4a"), \ + _PyASCIIObject_INIT("\x4b"), \ + _PyASCIIObject_INIT("\x4c"), \ + _PyASCIIObject_INIT("\x4d"), \ + _PyASCIIObject_INIT("\x4e"), \ + _PyASCIIObject_INIT("\x4f"), \ + _PyASCIIObject_INIT("\x50"), \ + _PyASCIIObject_INIT("\x51"), \ + _PyASCIIObject_INIT("\x52"), \ + _PyASCIIObject_INIT("\x53"), \ + _PyASCIIObject_INIT("\x54"), \ + _PyASCIIObject_INIT("\x55"), \ + _PyASCIIObject_INIT("\x56"), \ + _PyASCIIObject_INIT("\x57"), \ + _PyASCIIObject_INIT("\x58"), \ + _PyASCIIObject_INIT("\x59"), \ + _PyASCIIObject_INIT("\x5a"), \ + _PyASCIIObject_INIT("\x5b"), \ + _PyASCIIObject_INIT("\x5c"), \ + _PyASCIIObject_INIT("\x5d"), \ + _PyASCIIObject_INIT("\x5e"), \ + _PyASCIIObject_INIT("\x5f"), \ + _PyASCIIObject_INIT("\x60"), \ + _PyASCIIObject_INIT("\x61"), \ + _PyASCIIObject_INIT("\x62"), \ + _PyASCIIObject_INIT("\x63"), \ + _PyASCIIObject_INIT("\x64"), \ + _PyASCIIObject_INIT("\x65"), \ + _PyASCIIObject_INIT("\x66"), \ + _PyASCIIObject_INIT("\x67"), \ + _PyASCIIObject_INIT("\x68"), \ + _PyASCIIObject_INIT("\x69"), \ + _PyASCIIObject_INIT("\x6a"), \ + _PyASCIIObject_INIT("\x6b"), \ + _PyASCIIObject_INIT("\x6c"), \ + _PyASCIIObject_INIT("\x6d"), \ + _PyASCIIObject_INIT("\x6e"), \ + _PyASCIIObject_INIT("\x6f"), \ + _PyASCIIObject_INIT("\x70"), \ + _PyASCIIObject_INIT("\x71"), \ + 
_PyASCIIObject_INIT("\x72"), \ + _PyASCIIObject_INIT("\x73"), \ + _PyASCIIObject_INIT("\x74"), \ + _PyASCIIObject_INIT("\x75"), \ + _PyASCIIObject_INIT("\x76"), \ + _PyASCIIObject_INIT("\x77"), \ + _PyASCIIObject_INIT("\x78"), \ + _PyASCIIObject_INIT("\x79"), \ + _PyASCIIObject_INIT("\x7a"), \ + _PyASCIIObject_INIT("\x7b"), \ + _PyASCIIObject_INIT("\x7c"), \ + _PyASCIIObject_INIT("\x7d"), \ + _PyASCIIObject_INIT("\x7e"), \ + _PyASCIIObject_INIT("\x7f"), \ +} + +#define _Py_str_latin1_INIT { \ + _PyUnicode_LATIN1_INIT("\x80", "\xc2\x80"), \ + _PyUnicode_LATIN1_INIT("\x81", "\xc2\x81"), \ + _PyUnicode_LATIN1_INIT("\x82", "\xc2\x82"), \ + _PyUnicode_LATIN1_INIT("\x83", "\xc2\x83"), \ + _PyUnicode_LATIN1_INIT("\x84", "\xc2\x84"), \ + _PyUnicode_LATIN1_INIT("\x85", "\xc2\x85"), \ + _PyUnicode_LATIN1_INIT("\x86", "\xc2\x86"), \ + _PyUnicode_LATIN1_INIT("\x87", "\xc2\x87"), \ + _PyUnicode_LATIN1_INIT("\x88", "\xc2\x88"), \ + _PyUnicode_LATIN1_INIT("\x89", "\xc2\x89"), \ + _PyUnicode_LATIN1_INIT("\x8a", "\xc2\x8a"), \ + _PyUnicode_LATIN1_INIT("\x8b", "\xc2\x8b"), \ + _PyUnicode_LATIN1_INIT("\x8c", "\xc2\x8c"), \ + _PyUnicode_LATIN1_INIT("\x8d", "\xc2\x8d"), \ + _PyUnicode_LATIN1_INIT("\x8e", "\xc2\x8e"), \ + _PyUnicode_LATIN1_INIT("\x8f", "\xc2\x8f"), \ + _PyUnicode_LATIN1_INIT("\x90", "\xc2\x90"), \ + _PyUnicode_LATIN1_INIT("\x91", "\xc2\x91"), \ + _PyUnicode_LATIN1_INIT("\x92", "\xc2\x92"), \ + _PyUnicode_LATIN1_INIT("\x93", "\xc2\x93"), \ + _PyUnicode_LATIN1_INIT("\x94", "\xc2\x94"), \ + _PyUnicode_LATIN1_INIT("\x95", "\xc2\x95"), \ + _PyUnicode_LATIN1_INIT("\x96", "\xc2\x96"), \ + _PyUnicode_LATIN1_INIT("\x97", "\xc2\x97"), \ + _PyUnicode_LATIN1_INIT("\x98", "\xc2\x98"), \ + _PyUnicode_LATIN1_INIT("\x99", "\xc2\x99"), \ + _PyUnicode_LATIN1_INIT("\x9a", "\xc2\x9a"), \ + _PyUnicode_LATIN1_INIT("\x9b", "\xc2\x9b"), \ + _PyUnicode_LATIN1_INIT("\x9c", "\xc2\x9c"), \ + _PyUnicode_LATIN1_INIT("\x9d", "\xc2\x9d"), \ + _PyUnicode_LATIN1_INIT("\x9e", "\xc2\x9e"), \ + _PyUnicode_LATIN1_INIT("\x9f", "\xc2\x9f"), \ + _PyUnicode_LATIN1_INIT("\xa0", "\xc2\xa0"), \ + _PyUnicode_LATIN1_INIT("\xa1", "\xc2\xa1"), \ + _PyUnicode_LATIN1_INIT("\xa2", "\xc2\xa2"), \ + _PyUnicode_LATIN1_INIT("\xa3", "\xc2\xa3"), \ + _PyUnicode_LATIN1_INIT("\xa4", "\xc2\xa4"), \ + _PyUnicode_LATIN1_INIT("\xa5", "\xc2\xa5"), \ + _PyUnicode_LATIN1_INIT("\xa6", "\xc2\xa6"), \ + _PyUnicode_LATIN1_INIT("\xa7", "\xc2\xa7"), \ + _PyUnicode_LATIN1_INIT("\xa8", "\xc2\xa8"), \ + _PyUnicode_LATIN1_INIT("\xa9", "\xc2\xa9"), \ + _PyUnicode_LATIN1_INIT("\xaa", "\xc2\xaa"), \ + _PyUnicode_LATIN1_INIT("\xab", "\xc2\xab"), \ + _PyUnicode_LATIN1_INIT("\xac", "\xc2\xac"), \ + _PyUnicode_LATIN1_INIT("\xad", "\xc2\xad"), \ + _PyUnicode_LATIN1_INIT("\xae", "\xc2\xae"), \ + _PyUnicode_LATIN1_INIT("\xaf", "\xc2\xaf"), \ + _PyUnicode_LATIN1_INIT("\xb0", "\xc2\xb0"), \ + _PyUnicode_LATIN1_INIT("\xb1", "\xc2\xb1"), \ + _PyUnicode_LATIN1_INIT("\xb2", "\xc2\xb2"), \ + _PyUnicode_LATIN1_INIT("\xb3", "\xc2\xb3"), \ + _PyUnicode_LATIN1_INIT("\xb4", "\xc2\xb4"), \ + _PyUnicode_LATIN1_INIT("\xb5", "\xc2\xb5"), \ + _PyUnicode_LATIN1_INIT("\xb6", "\xc2\xb6"), \ + _PyUnicode_LATIN1_INIT("\xb7", "\xc2\xb7"), \ + _PyUnicode_LATIN1_INIT("\xb8", "\xc2\xb8"), \ + _PyUnicode_LATIN1_INIT("\xb9", "\xc2\xb9"), \ + _PyUnicode_LATIN1_INIT("\xba", "\xc2\xba"), \ + _PyUnicode_LATIN1_INIT("\xbb", "\xc2\xbb"), \ + _PyUnicode_LATIN1_INIT("\xbc", "\xc2\xbc"), \ + _PyUnicode_LATIN1_INIT("\xbd", "\xc2\xbd"), \ + _PyUnicode_LATIN1_INIT("\xbe", "\xc2\xbe"), \ + _PyUnicode_LATIN1_INIT("\xbf", 
"\xc2\xbf"), \ + _PyUnicode_LATIN1_INIT("\xc0", "\xc3\x80"), \ + _PyUnicode_LATIN1_INIT("\xc1", "\xc3\x81"), \ + _PyUnicode_LATIN1_INIT("\xc2", "\xc3\x82"), \ + _PyUnicode_LATIN1_INIT("\xc3", "\xc3\x83"), \ + _PyUnicode_LATIN1_INIT("\xc4", "\xc3\x84"), \ + _PyUnicode_LATIN1_INIT("\xc5", "\xc3\x85"), \ + _PyUnicode_LATIN1_INIT("\xc6", "\xc3\x86"), \ + _PyUnicode_LATIN1_INIT("\xc7", "\xc3\x87"), \ + _PyUnicode_LATIN1_INIT("\xc8", "\xc3\x88"), \ + _PyUnicode_LATIN1_INIT("\xc9", "\xc3\x89"), \ + _PyUnicode_LATIN1_INIT("\xca", "\xc3\x8a"), \ + _PyUnicode_LATIN1_INIT("\xcb", "\xc3\x8b"), \ + _PyUnicode_LATIN1_INIT("\xcc", "\xc3\x8c"), \ + _PyUnicode_LATIN1_INIT("\xcd", "\xc3\x8d"), \ + _PyUnicode_LATIN1_INIT("\xce", "\xc3\x8e"), \ + _PyUnicode_LATIN1_INIT("\xcf", "\xc3\x8f"), \ + _PyUnicode_LATIN1_INIT("\xd0", "\xc3\x90"), \ + _PyUnicode_LATIN1_INIT("\xd1", "\xc3\x91"), \ + _PyUnicode_LATIN1_INIT("\xd2", "\xc3\x92"), \ + _PyUnicode_LATIN1_INIT("\xd3", "\xc3\x93"), \ + _PyUnicode_LATIN1_INIT("\xd4", "\xc3\x94"), \ + _PyUnicode_LATIN1_INIT("\xd5", "\xc3\x95"), \ + _PyUnicode_LATIN1_INIT("\xd6", "\xc3\x96"), \ + _PyUnicode_LATIN1_INIT("\xd7", "\xc3\x97"), \ + _PyUnicode_LATIN1_INIT("\xd8", "\xc3\x98"), \ + _PyUnicode_LATIN1_INIT("\xd9", "\xc3\x99"), \ + _PyUnicode_LATIN1_INIT("\xda", "\xc3\x9a"), \ + _PyUnicode_LATIN1_INIT("\xdb", "\xc3\x9b"), \ + _PyUnicode_LATIN1_INIT("\xdc", "\xc3\x9c"), \ + _PyUnicode_LATIN1_INIT("\xdd", "\xc3\x9d"), \ + _PyUnicode_LATIN1_INIT("\xde", "\xc3\x9e"), \ + _PyUnicode_LATIN1_INIT("\xdf", "\xc3\x9f"), \ + _PyUnicode_LATIN1_INIT("\xe0", "\xc3\xa0"), \ + _PyUnicode_LATIN1_INIT("\xe1", "\xc3\xa1"), \ + _PyUnicode_LATIN1_INIT("\xe2", "\xc3\xa2"), \ + _PyUnicode_LATIN1_INIT("\xe3", "\xc3\xa3"), \ + _PyUnicode_LATIN1_INIT("\xe4", "\xc3\xa4"), \ + _PyUnicode_LATIN1_INIT("\xe5", "\xc3\xa5"), \ + _PyUnicode_LATIN1_INIT("\xe6", "\xc3\xa6"), \ + _PyUnicode_LATIN1_INIT("\xe7", "\xc3\xa7"), \ + _PyUnicode_LATIN1_INIT("\xe8", "\xc3\xa8"), \ + _PyUnicode_LATIN1_INIT("\xe9", "\xc3\xa9"), \ + _PyUnicode_LATIN1_INIT("\xea", "\xc3\xaa"), \ + _PyUnicode_LATIN1_INIT("\xeb", "\xc3\xab"), \ + _PyUnicode_LATIN1_INIT("\xec", "\xc3\xac"), \ + _PyUnicode_LATIN1_INIT("\xed", "\xc3\xad"), \ + _PyUnicode_LATIN1_INIT("\xee", "\xc3\xae"), \ + _PyUnicode_LATIN1_INIT("\xef", "\xc3\xaf"), \ + _PyUnicode_LATIN1_INIT("\xf0", "\xc3\xb0"), \ + _PyUnicode_LATIN1_INIT("\xf1", "\xc3\xb1"), \ + _PyUnicode_LATIN1_INIT("\xf2", "\xc3\xb2"), \ + _PyUnicode_LATIN1_INIT("\xf3", "\xc3\xb3"), \ + _PyUnicode_LATIN1_INIT("\xf4", "\xc3\xb4"), \ + _PyUnicode_LATIN1_INIT("\xf5", "\xc3\xb5"), \ + _PyUnicode_LATIN1_INIT("\xf6", "\xc3\xb6"), \ + _PyUnicode_LATIN1_INIT("\xf7", "\xc3\xb7"), \ + _PyUnicode_LATIN1_INIT("\xf8", "\xc3\xb8"), \ + _PyUnicode_LATIN1_INIT("\xf9", "\xc3\xb9"), \ + _PyUnicode_LATIN1_INIT("\xfa", "\xc3\xba"), \ + _PyUnicode_LATIN1_INIT("\xfb", "\xc3\xbb"), \ + _PyUnicode_LATIN1_INIT("\xfc", "\xc3\xbc"), \ + _PyUnicode_LATIN1_INIT("\xfd", "\xc3\xbd"), \ + _PyUnicode_LATIN1_INIT("\xfe", "\xc3\xbe"), \ + _PyUnicode_LATIN1_INIT("\xff", "\xc3\xbf"), \ } -#endif /* End auto-generated code */ + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_signal.h b/Include/internal/pycore_signal.h index b921dd170e9f6f..ca3f69d09fc0c1 100644 --- a/Include/internal/pycore_signal.h +++ b/Include/internal/pycore_signal.h @@ -10,8 +10,11 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif +#include "pycore_atomic.h" // _Py_atomic_address + #include <signal.h> // NSIG + #ifdef _SIG_MAXSIG // 
gh-91145: On FreeBSD, <signal.h> defines NSIG as 32: it doesn't include // realtime signals: [SIGRTMIN,SIGRTMAX]. Use _SIG_MAXSIG instead. For @@ -29,6 +32,66 @@ extern "C" { # define Py_NSIG 64 // Use a reasonable default value #endif +#define INVALID_FD (-1) + +struct _signals_runtime_state { + volatile struct { + _Py_atomic_int tripped; + /* func is atomic to ensure that PyErr_SetInterrupt is async-signal-safe + * (even though it would probably be otherwise, anyway). + */ + _Py_atomic_address func; + } handlers[Py_NSIG]; + + volatile struct { +#ifdef MS_WINDOWS + /* This would be "SOCKET fd" if <winsock2.h> were always included. + It isn't so we must cast to SOCKET where appropriate. */ + volatile int fd; +#elif defined(__VXWORKS__) + int fd; +#else + sig_atomic_t fd; +#endif + + int warn_on_full_buffer; +#ifdef MS_WINDOWS + int use_send; +#endif + } wakeup; + + /* Speed up sigcheck() when none tripped */ + _Py_atomic_int is_tripped; + + /* These objects necessarily belong to the main interpreter. */ + PyObject *default_handler; + PyObject *ignore_handler; + +#ifdef MS_WINDOWS + /* This would be "HANDLE sigint_event" if <windows.h> were always included. + It isn't so we must cast to HANDLE everywhere "sigint_event" is used. */ + void *sigint_event; +#endif + + /* True if the main interpreter thread exited due to an unhandled + * KeyboardInterrupt exception, suggesting the user pressed ^C. */ + int unhandled_keyboard_interrupt; +}; + +#ifdef MS_WINDOWS +# define _signals_WAKEUP_INIT \ + {.fd = INVALID_FD, .warn_on_full_buffer = 1, .use_send = 0} +#else +# define _signals_WAKEUP_INIT \ + {.fd = INVALID_FD, .warn_on_full_buffer = 1} +#endif + +#define _signals_RUNTIME_INIT \ + { \ + .wakeup = _signals_WAKEUP_INIT, \ + } + + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_time.h b/Include/internal/pycore_time.h new file mode 100644 index 00000000000000..949170c4493799 --- /dev/null +++ b/Include/internal/pycore_time.h @@ -0,0 +1,25 @@ +#ifndef Py_INTERNAL_TIME_H +#define Py_INTERNAL_TIME_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + + +struct _time_runtime_state { +#ifdef HAVE_TIMES + int ticks_per_second_initialized; + long ticks_per_second; +#else + int _not_used; +#endif +}; + + +#ifdef __cplusplus +} +#endif +#endif /* !Py_INTERNAL_TIME_H */ diff --git a/Include/internal/pycore_tracemalloc.h b/Include/internal/pycore_tracemalloc.h new file mode 100644 index 00000000000000..08d7d1096c78ce --- /dev/null +++ b/Include/internal/pycore_tracemalloc.h @@ -0,0 +1,121 @@ +#ifndef Py_INTERNAL_TRACEMALLOC_H +#define Py_INTERNAL_TRACEMALLOC_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + +#include "pycore_hashtable.h" // _Py_hashtable_t + + +/* Trace memory blocks allocated by PyMem_RawMalloc() */ +#define TRACE_RAW_MALLOC + + +struct _PyTraceMalloc_Config { + /* Module initialized? + Variable protected by the GIL */ + enum { + TRACEMALLOC_NOT_INITIALIZED, + TRACEMALLOC_INITIALIZED, + TRACEMALLOC_FINALIZED + } initialized; + + /* Is tracemalloc tracing memory allocations? + Variable protected by the GIL */ + int tracing; + + /* limit of the number of frames in a traceback, 1 by default. + Variable protected by the GIL. */ + int max_nframe; +}; + + +/* Pack the frame_t structure to reduce the memory footprint on 64-bit + architectures: 12 bytes instead of 16. 
*/ +struct +#ifdef __GNUC__ +__attribute__((packed)) +#elif defined(_MSC_VER) +#pragma pack(push, 4) +#endif +tracemalloc_frame { + /* filename cannot be NULL: "<unknown>" is used if the Python frame + filename is NULL */ + PyObject *filename; + unsigned int lineno; +}; +#ifdef _MSC_VER +#pragma pack(pop) +#endif + +struct tracemalloc_traceback { + Py_uhash_t hash; + /* Number of frames stored */ + uint16_t nframe; + /* Total number of frames the traceback had */ + uint16_t total_nframe; + struct tracemalloc_frame frames[1]; +}; + + +struct _tracemalloc_runtime_state { + struct _PyTraceMalloc_Config config; + + /* Protected by the GIL */ + struct { + PyMemAllocatorEx mem; + PyMemAllocatorEx raw; + PyMemAllocatorEx obj; + } allocators; + +#if defined(TRACE_RAW_MALLOC) + PyThread_type_lock tables_lock; +#endif + /* Size in bytes of currently traced memory. + Protected by TABLES_LOCK(). */ + size_t traced_memory; + /* Peak size in bytes of traced memory. + Protected by TABLES_LOCK(). */ + size_t peak_traced_memory; + /* Hash table used as a set to intern filenames: + PyObject* => PyObject*. + Protected by the GIL */ + _Py_hashtable_t *filenames; + /* Buffer to store a new traceback in traceback_new(). + Protected by the GIL. */ + struct tracemalloc_traceback *traceback; + /* Hash table used as a set to intern tracebacks: + traceback_t* => traceback_t* + Protected by the GIL */ + _Py_hashtable_t *tracebacks; + /* pointer (void*) => trace (trace_t*). + Protected by TABLES_LOCK(). */ + _Py_hashtable_t *traces; + /* domain (unsigned int) => traces (_Py_hashtable_t). + Protected by TABLES_LOCK(). */ + _Py_hashtable_t *domains; + + struct tracemalloc_traceback empty_traceback; + + Py_tss_t reentrant_key; +}; + +#define _tracemalloc_runtime_state_INIT \ + { \ + .config = { \ + .initialized = TRACEMALLOC_NOT_INITIALIZED, \ + .tracing = 0, \ + .max_nframe = 1, \ + }, \ + .reentrant_key = Py_tss_NEEDS_INIT, \ + } + + +#ifdef __cplusplus +} +#endif +#endif // !Py_INTERNAL_TRACEMALLOC_H diff --git a/Include/internal/pycore_tuple.h b/Include/internal/pycore_tuple.h index 1efe4fa2bdef94..edc70843b57531 100644 --- a/Include/internal/pycore_tuple.h +++ b/Include/internal/pycore_tuple.h @@ -62,11 +62,18 @@ struct _Py_tuple_state { #endif }; -#define _PyTuple_ITEMS(op) (_PyTuple_CAST(op)->ob_item) +#define _PyTuple_ITEMS(op) _Py_RVALUE(_PyTuple_CAST(op)->ob_item) extern PyObject *_PyTuple_FromArray(PyObject *const *, Py_ssize_t); extern PyObject *_PyTuple_FromArraySteal(PyObject *const *, Py_ssize_t); + +typedef struct { + PyObject_HEAD + Py_ssize_t it_index; + PyTupleObject *it_seq; /* Set to NULL when iterator is exhausted */ +} _PyTupleIterObject; + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_typeobject.h b/Include/internal/pycore_typeobject.h index 5e7aca1b9f5ae9..c207ce615c6f91 100644 --- a/Include/internal/pycore_typeobject.h +++ b/Include/internal/pycore_typeobject.h @@ -18,6 +18,15 @@ extern void _PyTypes_Fini(PyInterpreterState *); /* other API */ +/* Length of array of slotdef pointers used to store slots with the + same __name__. There should be at most MAX_EQUIV-1 slotdef entries with + the same __name__, for any __name__. Since that's a static property, it is + appropriate to declare fixed-size arrays for this. */ +#define MAX_EQUIV 10 + +typedef struct wrapperbase pytype_slotdef; + + // Type attribute lookup cache: speed up attribute and method lookups, // see _PyType_Lookup(). 
struct type_cache_entry { @@ -27,15 +36,9 @@ struct type_cache_entry { }; #define MCACHE_SIZE_EXP 12 -#define MCACHE_STATS 0 struct type_cache { struct type_cache_entry hashtable[1 << MCACHE_SIZE_EXP]; -#if MCACHE_STATS - size_t hits; - size_t misses; - size_t collisions; -#endif }; /* For now we hard-code this to a value for which we are confident diff --git a/Include/internal/pycore_unicodeobject.h b/Include/internal/pycore_unicodeobject.h index 63bf04b3e1b872..19faceebf1d8ee 100644 --- a/Include/internal/pycore_unicodeobject.h +++ b/Include/internal/pycore_unicodeobject.h @@ -9,6 +9,7 @@ extern "C" { #endif #include "pycore_fileutils.h" // _Py_error_handler +#include "pycore_ucnhash.h" // _PyUnicode_Name_CAPI void _PyUnicode_ExactDealloc(PyObject *op); @@ -31,6 +32,10 @@ struct _Py_unicode_runtime_ids { Py_ssize_t next_index; }; +struct _Py_unicode_runtime_state { + struct _Py_unicode_runtime_ids ids; +}; + /* fs_codec.encoding is initialized to NULL. Later, it is set to a non-NULL string by _PyUnicode_InitEncodings(). */ struct _Py_unicode_fs_codec { @@ -48,6 +53,8 @@ struct _Py_unicode_ids { struct _Py_unicode_state { struct _Py_unicode_fs_codec fs_codec; + _PyUnicode_Name_CAPI *ucnhash_capi; + // Unicode identifiers (_Py_Identifier): see _PyUnicode_FromId() struct _Py_unicode_ids ids; }; diff --git a/Include/internal/pycore_unicodeobject_generated.h b/Include/internal/pycore_unicodeobject_generated.h new file mode 100644 index 00000000000000..7f407c0141b8a5 --- /dev/null +++ b/Include/internal/pycore_unicodeobject_generated.h @@ -0,0 +1,1336 @@ +#ifndef Py_INTERNAL_UNICODEOBJECT_GENERATED_H +#define Py_INTERNAL_UNICODEOBJECT_GENERATED_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + +/* The following is auto-generated by Tools/build/generate_global_objects.py. 
*/ +static inline void +_PyUnicode_InitStaticStrings(void) { + PyObject *string; + string = &_Py_ID(CANCELLED); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(FINISHED); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(False); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(JSONDecodeError); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(PENDING); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(Py_Repr); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(TextIOWrapper); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(True); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(WarningMessage); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__IOBase_closed); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__abc_tpflags__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__abs__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__abstractmethods__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__add__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__aenter__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__aexit__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__aiter__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__all__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__and__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__anext__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__annotations__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__args__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__asyncio_running_event_loop__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__await__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__bases__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__bool__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__build_class__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__builtins__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__bytes__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__call__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__cantrace__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__class__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__class_getitem__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__classcell__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__complex__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__contains__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__copy__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__ctypes_from_outparam__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__del__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__delattr__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__delete__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__delitem__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__dict__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__dictoffset__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__dir__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__divmod__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__doc__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__enter__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__eq__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__exit__); + 
PyUnicode_InternInPlace(&string); + string = &_Py_ID(__file__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__float__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__floordiv__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__format__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__fspath__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__ge__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__get__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__getattr__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__getattribute__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__getinitargs__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__getitem__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__getnewargs__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__getnewargs_ex__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__getstate__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__gt__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__hash__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__iadd__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__iand__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__ifloordiv__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__ilshift__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__imatmul__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__imod__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__import__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__imul__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__index__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__init__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__init_subclass__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__instancecheck__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__int__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__invert__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__ior__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__ipow__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__irshift__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__isabstractmethod__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__isub__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__iter__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__itruediv__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__ixor__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__le__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__len__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__length_hint__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__lltrace__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__loader__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__lshift__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__lt__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__main__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__matmul__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__missing__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__mod__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__module__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__mro_entries__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__mul__); + 
PyUnicode_InternInPlace(&string); + string = &_Py_ID(__name__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__ne__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__neg__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__new__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__newobj__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__newobj_ex__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__next__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__notes__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__or__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__orig_class__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__origin__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__package__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__parameters__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__path__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__pos__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__pow__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__prepare__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__qualname__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__radd__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rand__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rdivmod__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__reduce__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__reduce_ex__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__repr__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__reversed__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rfloordiv__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rlshift__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rmatmul__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rmod__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rmul__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__ror__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__round__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rpow__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rrshift__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rshift__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rsub__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rtruediv__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rxor__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__set__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__set_name__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__setattr__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__setitem__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__setstate__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__sizeof__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__slotnames__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__slots__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__spec__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__str__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__sub__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__subclasscheck__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__subclasshook__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__truediv__); + 
PyUnicode_InternInPlace(&string); + string = &_Py_ID(__trunc__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__typing_is_unpacked_typevartuple__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__typing_prepare_subst__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__typing_subst__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__typing_unpacked_tuple_args__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__warningregistry__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__weaklistoffset__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__weakref__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__xor__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_abc_impl); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_abstract_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_annotation); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_anonymous_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_argtypes_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_as_parameter_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_asyncio_future_blocking); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_blksize); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_bootstrap); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_check_retval_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_dealloc_warn); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_feature_version); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_fields_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_finalizing); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_find_and_load); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_fix_up_module); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_flags_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_get_sourcefile); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_handle_fromlist); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_initializing); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_is_text_encoding); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_length_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_lock_unlock_module); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_loop); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_needs_com_addref_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_pack_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_restype_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_showwarnmsg); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_shutdown); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_slotnames); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_strptime_datetime); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_swappedbytes_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_type_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_uninitialized_submodules); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_warn_unawaited_coroutine); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_xoptions); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(a); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(abs_tol); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(access); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(add); + PyUnicode_InternInPlace(&string); + string = 
&_Py_ID(add_done_callback); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(after_in_child); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(after_in_parent); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(aggregate_class); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(append); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(argdefs); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(arguments); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(argv); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(as_integer_ratio); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(ast); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(attribute); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(authorizer_callback); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(autocommit); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(b); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(backtick); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(base); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(before); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(big); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(binary_form); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(block); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(buffer); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(buffer_callback); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(buffer_size); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(buffering); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(buffers); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(bufsize); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(builtins); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(byteorder); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(bytes); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(bytes_per_sep); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(c); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(c_call); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(c_exception); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(c_return); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(cached_statements); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(cadata); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(cafile); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(call); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(call_exception_handler); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(call_soon); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(cancel); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(capath); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(category); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(cb_type); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(certfile); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(check_same_thread); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(clear); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(close); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(closed); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(closefd); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(closure); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_argcount); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_cellvars); + PyUnicode_InternInPlace(&string); + string = 
&_Py_ID(co_code); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_consts); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_exceptiontable); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_filename); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_firstlineno); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_flags); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_freevars); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_kwonlyargcount); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_linetable); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_name); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_names); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_nlocals); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_posonlyargcount); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_qualname); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_stacksize); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_varnames); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(code); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(command); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(comment_factory); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(consts); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(context); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(cookie); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(copy); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(copyreg); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(coro); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(count); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(cwd); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(d); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(data); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(database); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(decode); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(decoder); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(default); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(defaultaction); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(delete); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(depth); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(detect_types); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(deterministic); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(device); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(dict); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(dictcomp); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(difference_update); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(digest); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(digest_size); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(digestmod); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(dir_fd); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(discard); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(dispatch_table); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(displayhook); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(dklen); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(doc); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(dont_inherit); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(dst); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(dst_dir_fd); + 
PyUnicode_InternInPlace(&string); + string = &_Py_ID(duration); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(e); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(effective_ids); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(element_factory); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(encode); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(encoding); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(end); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(end_lineno); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(end_offset); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(endpos); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(env); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(errors); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(event); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(eventmask); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(exc_type); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(exc_value); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(excepthook); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(exception); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(exp); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(extend); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(facility); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(factory); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(false); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(family); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fanout); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fd); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fd2); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fdel); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fget); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(file); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(file_actions); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(filename); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fileno); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(filepath); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fillvalue); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(filters); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(final); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(find_class); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fix_imports); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(flags); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(flush); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(follow_symlinks); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(format); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(frequency); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(from_param); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fromlist); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fromtimestamp); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fromutc); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fset); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(func); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(future); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(generation); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(genexpr); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(get); + PyUnicode_InternInPlace(&string); + 
string = &_Py_ID(get_debug); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(get_event_loop); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(get_loop); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(get_source); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(getattr); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(getstate); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(gid); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(globals); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(groupindex); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(groups); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(handle); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(hash_name); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(header); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(headers); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(hi); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(hook); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(id); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(ident); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(ignore); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(imag); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(importlib); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(in_fd); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(incoming); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(indexgroup); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(inf); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(inheritable); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(initial); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(initial_bytes); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(initial_value); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(initval); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(inner_size); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(input); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(insert_comments); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(insert_pis); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(instructions); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(intern); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(intersection); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(isatty); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(isinstance); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(isoformat); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(isolation_level); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(istext); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(item); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(items); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(iter); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(iterable); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(iterations); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(join); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(jump); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(keepends); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(key); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(keyfile); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(keys); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(kind); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(kw); 
+ PyUnicode_InternInPlace(&string); + string = &_Py_ID(kw1); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(kw2); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(lambda); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(last); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(last_node); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(last_traceback); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(last_type); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(last_value); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(latin1); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(leaf_size); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(len); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(length); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(level); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(limit); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(line); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(line_buffering); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(lineno); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(listcomp); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(little); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(lo); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(locale); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(locals); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(logoption); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(loop); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(mapping); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(match); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(max_length); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(maxdigits); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(maxevents); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(maxmem); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(maxsplit); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(maxvalue); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(memLevel); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(memlimit); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(message); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(metaclass); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(method); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(mod); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(mode); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(module); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(module_globals); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(modules); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(mro); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(msg); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(mycmp); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(n); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(n_arg); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(n_fields); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(n_sequence_fields); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(n_unnamed_fields); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(name); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(name_from); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(namespace_separator); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(namespaces); + 
PyUnicode_InternInPlace(&string); + string = &_Py_ID(narg); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(ndigits); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(new_limit); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(newline); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(newlines); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(next); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(node_depth); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(node_offset); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(ns); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(nstype); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(nt); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(null); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(number); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(obj); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(object); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(offset); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(offset_dst); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(offset_src); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(on_type_read); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(onceregistry); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(only_keys); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(oparg); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(opcode); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(open); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(opener); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(operation); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(optimize); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(options); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(order); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(out_fd); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(outgoing); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(overlapped); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(owner); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(p); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(pages); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(parent); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(password); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(path); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(pattern); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(peek); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(persistent_id); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(persistent_load); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(person); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(pi_factory); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(pid); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(policy); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(pos); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(pos1); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(pos2); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(posix); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(print_file_and_line); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(priority); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(progress); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(progress_handler); + PyUnicode_InternInPlace(&string); + 
string = &_Py_ID(proto); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(protocol); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(ps1); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(ps2); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(query); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(quotetabs); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(r); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(raw); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(read); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(read1); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(readable); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(readall); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(readinto); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(readinto1); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(readline); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(readonly); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(real); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(reducer_override); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(registry); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(rel_tol); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(reload); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(repl); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(replace); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(reserved); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(reset); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(resetids); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(return); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(reverse); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(reversed); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(s); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(salt); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(sched_priority); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(scheduler); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(seek); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(seekable); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(selectors); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(self); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(send); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(sep); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(sequence); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(server_hostname); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(server_side); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(session); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(setcomp); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(setpgroup); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(setsid); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(setsigdef); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(setsigmask); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(setstate); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(shape); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(show_cmd); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(signed); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(size); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(sizehint); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(sleep); + 
PyUnicode_InternInPlace(&string); + string = &_Py_ID(sock); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(sort); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(sound); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(source); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(source_traceback); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(src); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(src_dir_fd); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(stacklevel); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(start); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(statement); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(status); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(stderr); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(stdin); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(stdout); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(step); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(store_name); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(strategy); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(strftime); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(strict); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(strict_mode); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(string); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(sub_key); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(symmetric_difference_update); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(tabsize); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(tag); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(target); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(target_is_directory); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(task); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(tb_frame); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(tb_lasti); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(tb_lineno); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(tb_next); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(tell); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(template); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(term); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(text); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(threading); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(throw); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(timeout); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(times); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(timetuple); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(top); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(trace_callback); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(traceback); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(trailers); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(translate); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(true); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(truncate); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(twice); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(txt); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(type); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(tz); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(tzname); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(uid); + PyUnicode_InternInPlace(&string); + 
string = &_Py_ID(unlink); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(unraisablehook); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(uri); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(usedforsecurity); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(value); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(values); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(version); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(warnings); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(warnoptions); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(wbits); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(week); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(weekday); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(which); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(who); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(withdata); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(writable); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(write); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(write_through); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(x); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(year); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(zdict); + PyUnicode_InternInPlace(&string); +} +/* End auto-generated code */ +#ifdef __cplusplus +} +#endif +#endif /* !Py_INTERNAL_UNICODEOBJECT_GENERATED_H */ diff --git a/Include/memoryobject.h b/Include/memoryobject.h index 19aec679a5fad1..2c9146aa2b5b06 100644 --- a/Include/memoryobject.h +++ b/Include/memoryobject.h @@ -6,20 +6,10 @@ extern "C" { #endif -#ifndef Py_LIMITED_API -PyAPI_DATA(PyTypeObject) _PyManagedBuffer_Type; -#endif PyAPI_DATA(PyTypeObject) PyMemoryView_Type; #define PyMemoryView_Check(op) Py_IS_TYPE((op), &PyMemoryView_Type) -#ifndef Py_LIMITED_API -/* Get a pointer to the memoryview's private copy of the exporter's buffer. */ -#define PyMemoryView_GET_BUFFER(op) (&((PyMemoryViewObject *)(op))->view) -/* Get a pointer to the exporting object (this may be NULL!). */ -#define PyMemoryView_GET_BASE(op) (((PyMemoryViewObject *)(op))->view.obj) -#endif - PyAPI_FUNC(PyObject *) PyMemoryView_FromObject(PyObject *base); #if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03030000 PyAPI_FUNC(PyObject *) PyMemoryView_FromMemory(char *mem, Py_ssize_t size, @@ -32,38 +22,10 @@ PyAPI_FUNC(PyObject *) PyMemoryView_GetContiguous(PyObject *base, int buffertype, char order); - -/* The structs are declared here so that macros can work, but they shouldn't - be considered public. Don't access their fields directly, use the macros - and functions instead! 
*/ #ifndef Py_LIMITED_API -#define _Py_MANAGED_BUFFER_RELEASED 0x001 /* access to exporter blocked */ -#define _Py_MANAGED_BUFFER_FREE_FORMAT 0x002 /* free format */ -typedef struct { - PyObject_HEAD - int flags; /* state flags */ - Py_ssize_t exports; /* number of direct memoryview exports */ - Py_buffer master; /* snapshot buffer obtained from the original exporter */ -} _PyManagedBufferObject; - - -/* memoryview state flags */ -#define _Py_MEMORYVIEW_RELEASED 0x001 /* access to master buffer blocked */ -#define _Py_MEMORYVIEW_C 0x002 /* C-contiguous layout */ -#define _Py_MEMORYVIEW_FORTRAN 0x004 /* Fortran contiguous layout */ -#define _Py_MEMORYVIEW_SCALAR 0x008 /* scalar: ndim = 0 */ -#define _Py_MEMORYVIEW_PIL 0x010 /* PIL-style layout */ - -typedef struct { - PyObject_VAR_HEAD - _PyManagedBufferObject *mbuf; /* managed buffer */ - Py_hash_t hash; /* hash value for read-only views */ - int flags; /* state flags */ - Py_ssize_t exports; /* number of buffer re-exports */ - Py_buffer view; /* private copy of the exporter's view */ - PyObject *weakreflist; - Py_ssize_t ob_array[1]; /* shape, strides, suboffsets */ -} PyMemoryViewObject; +# define Py_CPYTHON_MEMORYOBJECT_H +# include "cpython/memoryobject.h" +# undef Py_CPYTHON_MEMORYOBJECT_H #endif #ifdef __cplusplus diff --git a/Include/moduleobject.h b/Include/moduleobject.h index fbb2c5ae79444b..555564ec73b4a2 100644 --- a/Include/moduleobject.h +++ b/Include/moduleobject.h @@ -43,8 +43,22 @@ PyAPI_DATA(PyTypeObject) PyModuleDef_Type; typedef struct PyModuleDef_Base { PyObject_HEAD + /* The function used to re-initialize the module. + This is only set for legacy (single-phase init) extension modules + and only used for those that support multiple initializations + (m_size >= 0). + It is set by _PyImport_LoadDynamicModuleWithSpec() + and _imp.create_builtin(). */ PyObject* (*m_init)(void); + /* The module's index into its interpreter's modules_by_index cache. + This is set for all extension modules but only used for legacy ones. + (See PyInterpreterState.modules_by_index for more info.) + It is set by PyModuleDef_Init(). */ Py_ssize_t m_index; + /* A copy of the module's __dict__ after the first time it was loaded. + This is only set/used for legacy modules that do not support + multiple initializations. + It is set by _PyImport_FixupExtensionObject(). */ PyObject* m_copy; } PyModuleDef_Base; diff --git a/Include/object.h b/Include/object.h index 75624fe8c77a51..3774f126730005 100644 --- a/Include/object.h +++ b/Include/object.h @@ -598,15 +598,44 @@ static inline void Py_DECREF(PyObject *op) * one of those can't cause problems -- but in part that relies on that * Python integers aren't currently weakly referencable. Best practice is * to use Py_CLEAR() even if you can't think of a reason for why you need to. + * + * gh-98724: Use a temporary variable to only evaluate the macro argument once, + * to avoid the duplication of side effects if the argument has side effects. + * + * gh-99701: If the PyObject* type is used with casting arguments to PyObject*, + * the code can be miscompiled with strict aliasing because of type punning. + * With strict aliasing, a compiler considers that two pointers of different + * types cannot read or write the same memory which enables optimization + * opportunities. + * + * If available, use _Py_TYPEOF() to use the 'op' type for temporary variables, + * and so avoid type punning. 
Otherwise, use memcpy() which causes type erasure + * and so prevents the compiler to reuse an old cached 'op' value after + * Py_CLEAR(). */ -#define Py_CLEAR(op) \ - do { \ - PyObject *_py_tmp = _PyObject_CAST(op); \ - if (_py_tmp != NULL) { \ - (op) = NULL; \ - Py_DECREF(_py_tmp); \ - } \ +#ifdef _Py_TYPEOF +#define Py_CLEAR(op) \ + do { \ + _Py_TYPEOF(op)* _tmp_op_ptr = &(op); \ + _Py_TYPEOF(op) _tmp_old_op = (*_tmp_op_ptr); \ + if (_tmp_old_op != NULL) { \ + *_tmp_op_ptr = _Py_NULL; \ + Py_DECREF(_tmp_old_op); \ + } \ } while (0) +#else +#define Py_CLEAR(op) \ + do { \ + PyObject **_tmp_op_ptr = _Py_CAST(PyObject**, &(op)); \ + PyObject *_tmp_old_op = (*_tmp_op_ptr); \ + if (_tmp_old_op != NULL) { \ + PyObject *_null_ptr = _Py_NULL; \ + memcpy(_tmp_op_ptr, &_null_ptr, sizeof(PyObject*)); \ + Py_DECREF(_tmp_old_op); \ + } \ + } while (0) +#endif + /* Function to use in case the object pointer can be NULL: */ static inline void Py_XINCREF(PyObject *op) diff --git a/Include/opcode.h b/Include/opcode.h index 9b9414cff401f0..284f49284b37c0 100644 --- a/Include/opcode.h +++ b/Include/opcode.h @@ -11,12 +11,17 @@ extern "C" { #define CACHE 0 #define POP_TOP 1 #define PUSH_NULL 2 +#define INTERPRETER_EXIT 3 #define END_FOR 4 #define NOP 9 #define UNARY_POSITIVE 10 #define UNARY_NEGATIVE 11 #define UNARY_NOT 12 #define UNARY_INVERT 15 +#define UNARY_POSITIVE_R 16 +#define UNARY_NEGATIVE_R 17 +#define UNARY_NOT_R 18 +#define UNARY_INVERT_R 19 #define BINARY_SUBSCR 25 #define BINARY_SLICE 26 #define STORE_SLICE 27 @@ -36,6 +41,7 @@ extern "C" { #define CLEANUP_THROW 55 #define STORE_SUBSCR 60 #define DELETE_SUBSCR 61 +#define STOPITERATION_ERROR 63 #define GET_ITER 68 #define GET_YIELD_FROM_ITER 69 #define PRINT_EXPR 70 @@ -109,6 +115,8 @@ extern "C" { #define YIELD_VALUE 150 #define RESUME 151 #define MATCH_CLASS 152 +#define LOAD_FAST_R 153 +#define STORE_FAST_R 154 #define FORMAT_VALUE 155 #define BUILD_CONST_KEY_MAP 156 #define BUILD_STRING 157 @@ -127,48 +135,42 @@ extern "C" { #define JUMP_NO_INTERRUPT 261 #define LOAD_METHOD 262 #define MAX_PSEUDO_OPCODE 262 -#define BINARY_OP_ADAPTIVE 3 #define BINARY_OP_ADD_FLOAT 5 #define BINARY_OP_ADD_INT 6 #define BINARY_OP_ADD_UNICODE 7 #define BINARY_OP_INPLACE_ADD_UNICODE 8 #define BINARY_OP_MULTIPLY_FLOAT 13 #define BINARY_OP_MULTIPLY_INT 14 -#define BINARY_OP_SUBTRACT_FLOAT 16 -#define BINARY_OP_SUBTRACT_INT 17 -#define BINARY_SUBSCR_ADAPTIVE 18 -#define BINARY_SUBSCR_DICT 19 -#define BINARY_SUBSCR_GETITEM 20 -#define BINARY_SUBSCR_LIST_INT 21 -#define BINARY_SUBSCR_TUPLE_INT 22 -#define CALL_ADAPTIVE 23 -#define CALL_PY_EXACT_ARGS 24 -#define CALL_PY_WITH_DEFAULTS 28 -#define CALL_BOUND_METHOD_EXACT_ARGS 29 -#define CALL_BUILTIN_CLASS 34 -#define CALL_BUILTIN_FAST_WITH_KEYWORDS 38 -#define CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 39 -#define CALL_NO_KW_BUILTIN_FAST 40 -#define CALL_NO_KW_BUILTIN_O 41 -#define CALL_NO_KW_ISINSTANCE 42 -#define CALL_NO_KW_LEN 43 -#define CALL_NO_KW_LIST_APPEND 44 -#define CALL_NO_KW_METHOD_DESCRIPTOR_FAST 45 -#define CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS 46 -#define CALL_NO_KW_METHOD_DESCRIPTOR_O 47 -#define CALL_NO_KW_STR_1 48 -#define CALL_NO_KW_TUPLE_1 56 -#define CALL_NO_KW_TYPE_1 57 -#define COMPARE_OP_ADAPTIVE 58 -#define COMPARE_OP_FLOAT_JUMP 59 -#define COMPARE_OP_INT_JUMP 62 -#define COMPARE_OP_STR_JUMP 63 -#define EXTENDED_ARG_QUICK 64 -#define FOR_ITER_ADAPTIVE 65 +#define BINARY_OP_SUBTRACT_FLOAT 20 +#define BINARY_OP_SUBTRACT_INT 21 +#define BINARY_SUBSCR_DICT 22 +#define BINARY_SUBSCR_GETITEM 23 
+#define BINARY_SUBSCR_LIST_INT 24 +#define BINARY_SUBSCR_TUPLE_INT 28 +#define CALL_PY_EXACT_ARGS 29 +#define CALL_PY_WITH_DEFAULTS 34 +#define CALL_BOUND_METHOD_EXACT_ARGS 38 +#define CALL_BUILTIN_CLASS 39 +#define CALL_BUILTIN_FAST_WITH_KEYWORDS 40 +#define CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 41 +#define CALL_NO_KW_BUILTIN_FAST 42 +#define CALL_NO_KW_BUILTIN_O 43 +#define CALL_NO_KW_ISINSTANCE 44 +#define CALL_NO_KW_LEN 45 +#define CALL_NO_KW_LIST_APPEND 46 +#define CALL_NO_KW_METHOD_DESCRIPTOR_FAST 47 +#define CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS 48 +#define CALL_NO_KW_METHOD_DESCRIPTOR_O 56 +#define CALL_NO_KW_STR_1 57 +#define CALL_NO_KW_TUPLE_1 58 +#define CALL_NO_KW_TYPE_1 59 +#define COMPARE_OP_FLOAT_JUMP 62 +#define COMPARE_OP_INT_JUMP 64 +#define COMPARE_OP_STR_JUMP 65 #define FOR_ITER_LIST 66 -#define FOR_ITER_RANGE 67 -#define JUMP_BACKWARD_QUICK 72 -#define LOAD_ATTR_ADAPTIVE 73 +#define FOR_ITER_TUPLE 67 +#define FOR_ITER_RANGE 72 +#define FOR_ITER_GEN 73 #define LOAD_ATTR_CLASS 76 #define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 77 #define LOAD_ATTR_INSTANCE_VALUE 78 @@ -180,26 +182,21 @@ extern "C" { #define LOAD_ATTR_METHOD_NO_DICT 121 #define LOAD_ATTR_METHOD_WITH_DICT 141 #define LOAD_ATTR_METHOD_WITH_VALUES 143 -#define LOAD_CONST__LOAD_FAST 153 -#define LOAD_FAST__LOAD_CONST 154 -#define LOAD_FAST__LOAD_FAST 158 -#define LOAD_GLOBAL_ADAPTIVE 159 -#define LOAD_GLOBAL_BUILTIN 160 -#define LOAD_GLOBAL_MODULE 161 -#define RESUME_QUICK 166 -#define STORE_ATTR_ADAPTIVE 167 -#define STORE_ATTR_INSTANCE_VALUE 168 -#define STORE_ATTR_SLOT 169 -#define STORE_ATTR_WITH_HINT 170 -#define STORE_FAST__LOAD_FAST 173 -#define STORE_FAST__STORE_FAST 174 -#define STORE_SUBSCR_ADAPTIVE 175 -#define STORE_SUBSCR_DICT 176 -#define STORE_SUBSCR_LIST_INT 177 -#define UNPACK_SEQUENCE_ADAPTIVE 178 -#define UNPACK_SEQUENCE_LIST 179 -#define UNPACK_SEQUENCE_TUPLE 180 -#define UNPACK_SEQUENCE_TWO_TUPLE 181 +#define LOAD_CONST__LOAD_FAST 158 +#define LOAD_FAST__LOAD_CONST 159 +#define LOAD_FAST__LOAD_FAST 160 +#define LOAD_GLOBAL_BUILTIN 161 +#define LOAD_GLOBAL_MODULE 166 +#define STORE_ATTR_INSTANCE_VALUE 167 +#define STORE_ATTR_SLOT 168 +#define STORE_ATTR_WITH_HINT 169 +#define STORE_FAST__LOAD_FAST 170 +#define STORE_FAST__STORE_FAST 173 +#define STORE_SUBSCR_DICT 174 +#define STORE_SUBSCR_LIST_INT 175 +#define UNPACK_SEQUENCE_LIST 176 +#define UNPACK_SEQUENCE_TUPLE 177 +#define UNPACK_SEQUENCE_TWO_TUPLE 178 #define DO_TRACING 255 #define HAS_ARG(op) ((((op) >= HAVE_ARGUMENT) && (!IS_PSEUDO_OPCODE(op)))\ @@ -240,6 +237,9 @@ extern "C" { #define NB_INPLACE_TRUE_DIVIDE 24 #define NB_INPLACE_XOR 25 +/* number of codewords for opcode+oparg(s) */ +#define OPSIZE 2 + #define IS_PSEUDO_OPCODE(op) (((op) >= MIN_PSEUDO_OPCODE) && ((op) <= MAX_PSEUDO_OPCODE)) diff --git a/Include/patchlevel.h b/Include/patchlevel.h index 6827225c191412..3a7e3b47a88501 100644 --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -20,10 +20,10 @@ #define PY_MINOR_VERSION 12 #define PY_MICRO_VERSION 0 #define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_ALPHA -#define PY_RELEASE_SERIAL 1 +#define PY_RELEASE_SERIAL 3 /* Version as a string */ -#define PY_VERSION "3.12.0a1+" +#define PY_VERSION "3.12.0a3+" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. 
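Note on the Py_CLEAR rework above (gh-98724 / gh-99701): the macro now expands its argument into a temporary so an argument with side effects is evaluated only once, and the memcpy() fallback sidesteps strict-aliasing miscompilation when _Py_TYPEOF() is not available. A minimal usage sketch, assuming the CPython 3.12 headers; DemoObject and its fields are hypothetical and only illustrate the point:

#include <Python.h>

typedef struct {
    PyObject_HEAD
    PyObject *items[4];   /* strong references owned by the object */
} DemoObject;

static int
demo_tp_clear(DemoObject *self)
{
    for (Py_ssize_t i = 0; i < 4; i++) {
        /* Each slot is dropped and set to NULL exactly once. With the
         * reworked macro the argument expression is expanded into a
         * temporary, so a side-effecting argument such as
         * self->items[i++] would no longer be evaluated twice. */
        Py_CLEAR(self->items[i]);
    }
    return 0;
}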
diff --git a/Include/pyport.h b/Include/pyport.h index b3ff2f4882e90f..b1b2a74779691d 100644 --- a/Include/pyport.h +++ b/Include/pyport.h @@ -698,6 +698,15 @@ extern char * _getpty(int *, int, mode_t, int); # define _Py__has_builtin(x) 0 #endif +// _Py_TYPEOF(expr) gets the type of an expression. +// +// Example: _Py_TYPEOF(x) x_copy = (x); +// +// The macro is only defined if GCC or clang compiler is used. +#if defined(__GNUC__) || defined(__clang__) +# define _Py_TYPEOF(expr) __typeof__(expr) +#endif + /* A convenient way for code to know if sanitizers are enabled. */ #if defined(__has_feature) diff --git a/Include/pystats.h b/Include/pystats.h index 87e92aa4f05fa6..04630c9e0f92e5 100644 --- a/Include/pystats.h +++ b/Include/pystats.h @@ -65,8 +65,15 @@ typedef struct _object_stats { uint64_t dict_materialized_new_key; uint64_t dict_materialized_too_big; uint64_t dict_materialized_str_subclass; + uint64_t type_cache_hits; + uint64_t type_cache_misses; + uint64_t type_cache_dunder_hits; + uint64_t type_cache_dunder_misses; + uint64_t type_cache_collisions; } ObjectStats; +# + typedef struct _stats { OpcodeStats opcode_stats[256]; CallStats call_stats; diff --git a/Include/structmember.h b/Include/structmember.h index 65a777d5f52117..f6e8fd829892f4 100644 --- a/Include/structmember.h +++ b/Include/structmember.h @@ -5,69 +5,50 @@ extern "C" { #endif -/* Interface to map C struct members to Python object attributes */ - -#include <stddef.h> /* For offsetof */ - -/* An array of PyMemberDef structures defines the name, type and offset - of selected members of a C structure. These can be read by - PyMember_GetOne() and set by PyMember_SetOne() (except if their READONLY - flag is set). The array must be terminated with an entry whose name - pointer is NULL. */ - -struct PyMemberDef { - const char *name; - int type; - Py_ssize_t offset; - int flags; - const char *doc; -}; +/* Interface to map C struct members to Python object attributes + * + * This header is deprecated: new code should not use stuff from here. + * New definitions are in descrobject.h. + * + * However, there's nothing wrong with old code continuing to use it, + * and there's not much mainenance overhead in maintaining a few aliases. + * So, don't be too eager to convert old code. + * + * It uses names not prefixed with Py_. + * It is also *not* included from Python.h and must be included individually. + */ + +#include <stddef.h> /* For offsetof (not always provided by Python.h) */ /* Types */ -#define T_SHORT 0 -#define T_INT 1 -#define T_LONG 2 -#define T_FLOAT 3 -#define T_DOUBLE 4 -#define T_STRING 5 -#define T_OBJECT 6 -/* XXX the ordering here is weird for binary compatibility */ -#define T_CHAR 7 /* 1-character string */ -#define T_BYTE 8 /* 8-bit signed int */ -/* unsigned variants: */ -#define T_UBYTE 9 -#define T_USHORT 10 -#define T_UINT 11 -#define T_ULONG 12 - -/* Added by Jack: strings contained in the structure */ -#define T_STRING_INPLACE 13 - -/* Added by Lillo: bools contained in the structure (assumed char) */ -#define T_BOOL 14 - -#define T_OBJECT_EX 16 /* Like T_OBJECT, but raises AttributeError - when the value is NULL, instead of - converting to None. 
*/ -#define T_LONGLONG 17 -#define T_ULONGLONG 18 - -#define T_PYSSIZET 19 /* Py_ssize_t */ -#define T_NONE 20 /* Value is always None */ - +#define T_SHORT Py_T_SHORT +#define T_INT Py_T_INT +#define T_LONG Py_T_LONG +#define T_FLOAT Py_T_FLOAT +#define T_DOUBLE Py_T_DOUBLE +#define T_STRING Py_T_STRING +#define T_OBJECT _Py_T_OBJECT +#define T_CHAR Py_T_CHAR +#define T_BYTE Py_T_BYTE +#define T_UBYTE Py_T_UBYTE +#define T_USHORT Py_T_USHORT +#define T_UINT Py_T_UINT +#define T_ULONG Py_T_ULONG +#define T_STRING_INPLACE Py_T_STRING_INPLACE +#define T_BOOL Py_T_BOOL +#define T_OBJECT_EX Py_T_OBJECT_EX +#define T_LONGLONG Py_T_LONGLONG +#define T_ULONGLONG Py_T_ULONGLONG +#define T_PYSSIZET Py_T_PYSSIZET +#define T_NONE _Py_T_NONE /* Flags */ -#define READONLY 1 -#define READ_RESTRICTED 2 -#define PY_WRITE_RESTRICTED 4 +#define READONLY Py_READONLY +#define PY_AUDIT_READ Py_AUDIT_READ +#define READ_RESTRICTED Py_AUDIT_READ +#define PY_WRITE_RESTRICTED _Py_WRITE_RESTRICTED #define RESTRICTED (READ_RESTRICTED | PY_WRITE_RESTRICTED) -#define PY_AUDIT_READ READ_RESTRICTED - -/* Current API, use this */ -PyAPI_FUNC(PyObject *) PyMember_GetOne(const char *, PyMemberDef *); -PyAPI_FUNC(int) PyMember_SetOne(char *, PyMemberDef *, PyObject *); - #ifdef __cplusplus } diff --git a/Lib/_aix_support.py b/Lib/_aix_support.py index 1d8482ff382507..18533e769b7514 100644 --- a/Lib/_aix_support.py +++ b/Lib/_aix_support.py @@ -1,15 +1,9 @@ """Shared AIX support functions.""" +import subprocess import sys import sysconfig -try: - import subprocess -except ImportError: # pragma: no cover - # _aix_support is used in distutils by setup.py to build C extensions, - # before subprocess dependencies like _posixsubprocess are available. - import _bootsubprocess as subprocess - def _aix_tag(vrtl, bd): # type: (List[int], int) -> str diff --git a/Lib/_pyio.py b/Lib/_pyio.py index 12510784c8b926..7f247ff47c9e61 100644 --- a/Lib/_pyio.py +++ b/Lib/_pyio.py @@ -638,10 +638,7 @@ def read(self, size=-1): def readall(self): """Read until EOF, using multiple read() call.""" res = bytearray() - while True: - data = self.read(DEFAULT_BUFFER_SIZE) - if not data: - break + while data := self.read(DEFAULT_BUFFER_SIZE): res += data if res: return bytes(res) @@ -1129,6 +1126,7 @@ def peek(self, size=0): do at most one raw read to satisfy it. We never return more than self.buffer_size. """ + self._checkClosed("peek of closed file") with self._read_lock: return self._peek_unlocked(size) @@ -1147,6 +1145,7 @@ def read1(self, size=-1): """Reads up to size bytes, with at most one read() system call.""" # Returns up to size bytes. If at least one byte is buffered, we # only return buffered bytes. Otherwise, we do one raw read. + self._checkClosed("read of closed file") if size < 0: size = self.buffer_size if size == 0: @@ -1164,6 +1163,8 @@ def read1(self, size=-1): def _readinto(self, buf, read1): """Read data into *buf* with at most one system call.""" + self._checkClosed("readinto of closed file") + # Need to create a memoryview object of type 'b', otherwise # we may not be able to assign bytes to it, and slicing it # would create a new object. 
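Returning to the Include/structmember.h change above: the legacy T_*/READONLY names are now plain aliases for the Py_-prefixed constants defined in descrobject.h, so new extension code can declare its members without the deprecated header while old code keeps compiling. A minimal sketch under that assumption; CounterObject and its members are hypothetical, not part of the patch:

#include <stddef.h>   /* offsetof */
#include <Python.h>

typedef struct {
    PyObject_HEAD
    int counter;          /* exposed as a read-only attribute */
    PyObject *label;      /* raises AttributeError while NULL */
} CounterObject;

static PyMemberDef counter_members[] = {
    /* Py_T_INT / Py_READONLY replace the old T_INT / READONLY spellings;
     * the aliases above keep existing extensions working unchanged. */
    {"counter", Py_T_INT, offsetof(CounterObject, counter), Py_READONLY,
     "number of recorded events"},
    {"label", Py_T_OBJECT_EX, offsetof(CounterObject, label), 0,
     "optional display name"},
    {NULL}  /* sentinel */
};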
@@ -1213,6 +1214,7 @@ def tell(self): def seek(self, pos, whence=0): if whence not in valid_seek_flags: raise ValueError("invalid whence value") + self._checkClosed("seek of closed file") with self._read_lock: if whence == 1: pos -= len(self._read_buf) - self._read_pos diff --git a/Lib/_pylong.py b/Lib/_pylong.py index e0d4e9042193e2..d14c1d93836327 100644 --- a/Lib/_pylong.py +++ b/Lib/_pylong.py @@ -16,8 +16,6 @@ import re import decimal -_DEBUG = False - def int_to_decimal(n): """Asymptotically fast conversion of an 'int' to Decimal.""" @@ -32,9 +30,6 @@ def int_to_decimal(n): # "clever" recursive way. If we want a string representation, we # apply str to _that_. - if _DEBUG: - print('int_to_decimal', n.bit_length(), file=sys.stderr) - D = decimal.Decimal D2 = D(2) @@ -141,8 +136,6 @@ def inner(a, b): def int_from_string(s): """Asymptotically fast version of PyLong_FromString(), conversion of a string of decimal digits into an 'int'.""" - if _DEBUG: - print('int_from_string', len(s), file=sys.stderr) # PyLong_FromString() has already removed leading +/-, checked for invalid # use of underscore characters, checked that string consists of only digits # and underscores, and stripped leading whitespace. The input can still @@ -281,8 +274,6 @@ def int_divmod(a, b): """Asymptotically fast replacement for divmod, for 'int'. Its time complexity is O(n**1.58), where n = #bits(a) + #bits(b). """ - if _DEBUG: - print('int_divmod', a.bit_length(), b.bit_length(), file=sys.stderr) if b == 0: raise ZeroDivisionError elif b < 0: diff --git a/Lib/argparse.py b/Lib/argparse.py index d2dcfdf5682f7c..240625ff01084e 100644 --- a/Lib/argparse.py +++ b/Lib/argparse.py @@ -1997,7 +1997,11 @@ def consume_optional(start_index): # arguments, try to parse more single-dash options out # of the tail of the option string chars = self.prefix_chars - if arg_count == 0 and option_string[1] not in chars: + if ( + arg_count == 0 + and option_string[1] not in chars + and explicit_arg != '' + ): action_tuples.append((action, [], option_string)) char = option_string[0] option_string = char + explicit_arg[0] diff --git a/Lib/ast.py b/Lib/ast.py index 1a94e9368c161a..2cbc80a9835aa5 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -237,6 +237,12 @@ def increment_lineno(node, n=1): location in a file. """ for child in walk(node): + # TypeIgnore is a special case where lineno is not an attribute + # but rather a field of the node itself. 
+ if isinstance(child, TypeIgnore): + child.lineno = getattr(child, 'lineno', 0) + n + continue + if 'lineno' in child._attributes: child.lineno = getattr(child, 'lineno', 0) + n if ( diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index c8a2f9f25634ef..f2f93758c3a817 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -377,7 +377,7 @@ async def serve_forever(self): self._serving_forever_fut = None async def wait_closed(self): - if self._sockets is None or self._waiters is None: + if self._waiters is None or self._active_count == 0: return waiter = self._loop.create_future() self._waiters.append(waiter) @@ -986,6 +986,8 @@ async def _connect_sock(self, exceptions, addr_info, local_addr_infos=None): if sock is not None: sock.close() raise + finally: + exceptions = my_exceptions = None async def create_connection( self, protocol_factory, host=None, port=None, @@ -1084,19 +1086,22 @@ async def create_connection( if sock is None: exceptions = [exc for sub in exceptions for exc in sub] - if all_errors: - raise ExceptionGroup("create_connection failed", exceptions) - if len(exceptions) == 1: - raise exceptions[0] - else: - # If they all have the same str(), raise one. - model = str(exceptions[0]) - if all(str(exc) == model for exc in exceptions): + try: + if all_errors: + raise ExceptionGroup("create_connection failed", exceptions) + if len(exceptions) == 1: raise exceptions[0] - # Raise a combined exception so the user can see all - # the various error messages. - raise OSError('Multiple exceptions: {}'.format( - ', '.join(str(exc) for exc in exceptions))) + else: + # If they all have the same str(), raise one. + model = str(exceptions[0]) + if all(str(exc) == model for exc in exceptions): + raise exceptions[0] + # Raise a combined exception so the user can see all + # the various error messages. + raise OSError('Multiple exceptions: {}'.format( + ', '.join(str(exc) for exc in exceptions))) + finally: + exceptions = None else: if sock is None: @@ -1904,6 +1909,8 @@ def _run_once(self): event_list = self._selector.select(timeout) self._process_events(event_list) + # Needed to break cycles when an exception occurs. + event_list = None # Handle 'later' callbacks that are ready. end_time = self.time() + self._clock_resolution diff --git a/Lib/asyncio/events.py b/Lib/asyncio/events.py index a327ba54a323a8..34a8869dff8def 100644 --- a/Lib/asyncio/events.py +++ b/Lib/asyncio/events.py @@ -13,6 +13,7 @@ import contextvars import os +import signal import socket import subprocess import sys @@ -618,7 +619,7 @@ def get_event_loop(self): Returns an event loop object implementing the BaseEventLoop interface, or raises an exception in case no event loop has been set for the - current context and the current policy does not specify to create one. + current context. It should never return None.""" raise NotImplementedError @@ -671,11 +672,6 @@ def get_event_loop(self): Returns an instance of EventLoop or raises an exception. """ - if (self._local._loop is None and - not self._local._set_called and - threading.current_thread() is threading.main_thread()): - self.set_event_loop(self.new_event_loop()) - if self._local._loop is None: raise RuntimeError('There is no current event loop in thread %r.' % threading.current_thread().name) @@ -785,16 +781,9 @@ def get_event_loop(): the result of `get_event_loop_policy().get_event_loop()` call. 
""" # NOTE: this function is implemented in C (see _asynciomodule.c) - return _py__get_event_loop() - - -def _get_event_loop(stacklevel=3): current_loop = _get_running_loop() if current_loop is not None: return current_loop - import warnings - warnings.warn('There is no current event loop', - DeprecationWarning, stacklevel=stacklevel) return get_event_loop_policy().get_event_loop() @@ -824,7 +813,6 @@ def set_child_watcher(watcher): _py__set_running_loop = _set_running_loop _py_get_running_loop = get_running_loop _py_get_event_loop = get_event_loop -_py__get_event_loop = _get_event_loop try: @@ -832,7 +820,7 @@ def set_child_watcher(watcher): # functions in asyncio. Pure Python implementation is # about 4 times slower than C-accelerated. from _asyncio import (_get_running_loop, _set_running_loop, - get_running_loop, get_event_loop, _get_event_loop) + get_running_loop, get_event_loop) except ImportError: pass else: @@ -841,4 +829,13 @@ def set_child_watcher(watcher): _c__set_running_loop = _set_running_loop _c_get_running_loop = get_running_loop _c_get_event_loop = get_event_loop - _c__get_event_loop = _get_event_loop + + +if hasattr(os, 'fork'): + def on_fork(): + # Reset the loop and wakeupfd in the forked child process. + if _event_loop_policy is not None: + _event_loop_policy._local = BaseDefaultEventLoopPolicy._Local() + signal.set_wakeup_fd(-1) + + os.register_at_fork(after_in_child=on_fork) diff --git a/Lib/asyncio/futures.py b/Lib/asyncio/futures.py index 3a6b44a0910869..97fc4e3fcb60ee 100644 --- a/Lib/asyncio/futures.py +++ b/Lib/asyncio/futures.py @@ -77,7 +77,7 @@ def __init__(self, *, loop=None): the default event loop. """ if loop is None: - self._loop = events._get_event_loop() + self._loop = events.get_event_loop() else: self._loop = loop self._callbacks = [] @@ -413,7 +413,7 @@ def wrap_future(future, *, loop=None): assert isinstance(future, concurrent.futures.Future), \ f'concurrent.futures.Future is expected, got {future!r}' if loop is None: - loop = events._get_event_loop() + loop = events.get_event_loop() new_future = loop.create_future() _chain_future(future, new_future) return new_future diff --git a/Lib/asyncio/proactor_events.py b/Lib/asyncio/proactor_events.py index c6aab408fc7410..1e2a730cf368a9 100644 --- a/Lib/asyncio/proactor_events.py +++ b/Lib/asyncio/proactor_events.py @@ -288,7 +288,8 @@ def _loop_reading(self, fut=None): # we got end-of-file so no need to reschedule a new read return - data = self._data[:length] + # It's a new slice so make it immutable so protocols upstream don't have problems + data = bytes(memoryview(self._data)[:length]) else: # the future will be replaced by next proactor.recv call fut.cancel() diff --git a/Lib/asyncio/runners.py b/Lib/asyncio/runners.py index b1c4dbd7619721..1b89236599aad7 100644 --- a/Lib/asyncio/runners.py +++ b/Lib/asyncio/runners.py @@ -157,7 +157,7 @@ def _on_sigint(self, signum, frame, main_task): raise KeyboardInterrupt() -def run(main, *, debug=None): +def run(main, *, debug=None, loop_factory=None): """Execute the coroutine and return the result. 
This function runs the passed coroutine, taking care of @@ -190,7 +190,7 @@ async def main(): raise RuntimeError( "asyncio.run() cannot be called from a running event loop") - with Runner(debug=debug) as runner: + with Runner(debug=debug, loop_factory=loop_factory) as runner: return runner.run(main) diff --git a/Lib/asyncio/selector_events.py b/Lib/asyncio/selector_events.py index bfa4590154f372..3d30006198f671 100644 --- a/Lib/asyncio/selector_events.py +++ b/Lib/asyncio/selector_events.py @@ -633,7 +633,11 @@ async def sock_connect(self, sock, address): fut = self.create_future() self._sock_connect(fut, sock, address) - return await fut + try: + return await fut + finally: + # Needed to break cycles when an exception occurs. + fut = None def _sock_connect(self, fut, sock, address): fd = sock.fileno() @@ -655,6 +659,8 @@ def _sock_connect(self, fut, sock, address): fut.set_exception(exc) else: fut.set_result(None) + finally: + fut = None def _sock_write_done(self, fd, fut, handle=None): if handle is None or not handle.cancelled(): @@ -678,6 +684,8 @@ def _sock_connect_cb(self, fut, sock, address): fut.set_exception(exc) else: fut.set_result(None) + finally: + fut = None async def sock_accept(self, sock): """Accept a connection. diff --git a/Lib/asyncio/sslproto.py b/Lib/asyncio/sslproto.py index 5cb5cd35883f07..bbf9cad6bc7f86 100644 --- a/Lib/asyncio/sslproto.py +++ b/Lib/asyncio/sslproto.py @@ -199,12 +199,6 @@ def get_read_buffer_size(self): """Return the current size of the read buffer.""" return self._ssl_protocol._get_read_buffer_size() - def get_write_buffer_limits(self): - """Get the high and low watermarks for write flow control. - Return a tuple (low, high) where low and high are - positive number of bytes.""" - return self._ssl_protocol._transport.get_write_buffer_limits() - @property def _protocol_paused(self): # Required for sendfile fallback pause_writing/resume_writing logic diff --git a/Lib/asyncio/streams.py b/Lib/asyncio/streams.py index c4d837a1170819..0f9098b4195633 100644 --- a/Lib/asyncio/streams.py +++ b/Lib/asyncio/streams.py @@ -125,7 +125,7 @@ class FlowControlMixin(protocols.Protocol): def __init__(self, loop=None): if loop is None: - self._loop = events._get_event_loop(stacklevel=4) + self._loop = events.get_event_loop() else: self._loop = loop self._paused = False @@ -404,7 +404,7 @@ def __init__(self, limit=_DEFAULT_LIMIT, loop=None): self._limit = limit if loop is None: - self._loop = events._get_event_loop() + self._loop = events.get_event_loop() else: self._loop = loop self._buffer = bytearray() @@ -688,7 +688,7 @@ async def read(self, n=-1): await self._wait_for_data('read') # This will work right even if buffer is less than n bytes - data = bytes(self._buffer[:n]) + data = bytes(memoryview(self._buffer)[:n]) del self._buffer[:n] self._maybe_resume_transport() @@ -730,7 +730,7 @@ async def readexactly(self, n): data = bytes(self._buffer) self._buffer.clear() else: - data = bytes(self._buffer[:n]) + data = bytes(memoryview(self._buffer)[:n]) del self._buffer[:n] self._maybe_resume_transport() return data diff --git a/Lib/asyncio/tasks.py b/Lib/asyncio/tasks.py index 571013745aa03a..fa853283c0c5e4 100644 --- a/Lib/asyncio/tasks.py +++ b/Lib/asyncio/tasks.py @@ -582,7 +582,7 @@ def as_completed(fs, *, timeout=None): from .queues import Queue # Import here to avoid circular import problem. 
done = Queue() - loop = events._get_event_loop() + loop = events.get_event_loop() todo = {ensure_future(f, loop=loop) for f in set(fs)} timeout_handle = None @@ -668,7 +668,7 @@ def _ensure_future(coro_or_future, *, loop=None): 'is required') if loop is None: - loop = events._get_event_loop(stacklevel=4) + loop = events.get_event_loop() try: return loop.create_task(coro_or_future) except RuntimeError: @@ -749,7 +749,7 @@ def gather(*coros_or_futures, return_exceptions=False): gather won't cancel any other awaitables. """ if not coros_or_futures: - loop = events._get_event_loop() + loop = events.get_event_loop() outer = loop.create_future() outer.set_result([]) return outer diff --git a/Lib/asyncio/unix_events.py b/Lib/asyncio/unix_events.py index 2de7a1bfd68155..b21e0394141bf4 100644 --- a/Lib/asyncio/unix_events.py +++ b/Lib/asyncio/unix_events.py @@ -846,6 +846,13 @@ class AbstractChildWatcher: waitpid(-1), there should be only one active object per process. """ + def __init_subclass__(cls) -> None: + if cls.__module__ != __name__: + warnings._deprecated("AbstractChildWatcher", + "{name!r} is deprecated as of Python 3.12 and will be " + "removed in Python {remove}.", + remove=(3, 14)) + def add_child_handler(self, pid, callback, *args): """Register a new child handler. diff --git a/Lib/asyncio/windows_events.py b/Lib/asyncio/windows_events.py index acc97daafecc0b..4dad436fb4187f 100644 --- a/Lib/asyncio/windows_events.py +++ b/Lib/asyncio/windows_events.py @@ -439,7 +439,11 @@ def select(self, timeout=None): self._poll(timeout) tmp = self._results self._results = [] - return tmp + try: + return tmp + finally: + # Needed to break cycles when an exception occurs. + tmp = None def _result(self, value): fut = self._loop.create_future() @@ -793,6 +797,8 @@ def _poll(self, timeout=None): else: f.set_result(value) self._results.append(f) + finally: + f = None # Remove unregistered futures for ov in self._unregistered: diff --git a/Lib/base64.py b/Lib/base64.py index 30796a6fd6d05a..95dc7b0086051b 100755 --- a/Lib/base64.py +++ b/Lib/base64.py @@ -508,14 +508,8 @@ def b85decode(b): def encode(input, output): """Encode a file; input and output are binary files.""" - while True: - s = input.read(MAXBINSIZE) - if not s: - break - while len(s) < MAXBINSIZE: - ns = input.read(MAXBINSIZE-len(s)) - if not ns: - break + while s := input.read(MAXBINSIZE): + while len(s) < MAXBINSIZE and (ns := input.read(MAXBINSIZE-len(s))): s += ns line = binascii.b2a_base64(s) output.write(line) @@ -523,10 +517,7 @@ def encode(input, output): def decode(input, output): """Decode a file; input and output are binary files.""" - while True: - line = input.readline() - if not line: - break + while line := input.readline(): s = binascii.a2b_base64(line) output.write(s) diff --git a/Lib/copyreg.py b/Lib/copyreg.py index c8a52a2dc63a27..578392409b403c 100644 --- a/Lib/copyreg.py +++ b/Lib/copyreg.py @@ -25,16 +25,10 @@ def constructor(object): # Example: provide pickling support for complex numbers. 
-try: - complex -except NameError: - pass -else: +def pickle_complex(c): + return complex, (c.real, c.imag) - def pickle_complex(c): - return complex, (c.real, c.imag) - - pickle(complex, pickle_complex, complex) +pickle(complex, pickle_complex, complex) def pickle_union(obj): import functools, operator diff --git a/Lib/csv.py b/Lib/csv.py index 0de5656a4eed7b..4ef8be45ca9e0a 100644 --- a/Lib/csv.py +++ b/Lib/csv.py @@ -139,7 +139,8 @@ def __init__(self, f, fieldnames, restval="", extrasaction="raise", fieldnames = list(fieldnames) self.fieldnames = fieldnames # list of keys for the dict self.restval = restval # for writing short dicts - if extrasaction.lower() not in ("raise", "ignore"): + extrasaction = extrasaction.lower() + if extrasaction not in ("raise", "ignore"): raise ValueError("extrasaction (%s) must be 'raise' or 'ignore'" % extrasaction) self.extrasaction = extrasaction @@ -165,11 +166,6 @@ def writerows(self, rowdicts): __class_getitem__ = classmethod(types.GenericAlias) -# Guard Sniffer's type checking against builds that exclude complex() -try: - complex -except NameError: - complex = float class Sniffer: ''' diff --git a/Lib/ctypes/_aix.py b/Lib/ctypes/_aix.py index fc3e95cbcc88a5..ee790f713a9ee3 100644 --- a/Lib/ctypes/_aix.py +++ b/Lib/ctypes/_aix.py @@ -108,12 +108,8 @@ def get_ld_headers(file): p = Popen(["/usr/bin/dump", f"-X{AIX_ABI}", "-H", file], universal_newlines=True, stdout=PIPE, stderr=DEVNULL) # be sure to read to the end-of-file - getting all entries - while True: - ld_header = get_ld_header(p) - if ld_header: - ldr_headers.append((ld_header, get_ld_header_info(p))) - else: - break + while ld_header := get_ld_header(p): + ldr_headers.append((ld_header, get_ld_header_info(p))) p.stdout.close() p.wait() return ldr_headers diff --git a/Lib/datetime.py b/Lib/datetime.py index 01742680a95bb6..1b0c5cb2d1c6ff 100644 --- a/Lib/datetime.py +++ b/Lib/datetime.py @@ -1032,13 +1032,13 @@ def ctime(self): _MONTHNAMES[self._month], self._day, self._year) - def strftime(self, fmt): + def strftime(self, format): """ Format using strftime(). Example: "%d/%m/%Y, %H:%M:%S" """ - return _wrap_strftime(self, fmt, self.timetuple()) + return _wrap_strftime(self, format, self.timetuple()) def __format__(self, fmt): if not isinstance(fmt, str): diff --git a/Lib/dis.py b/Lib/dis.py index a045d18241b465..682ddc71cb4b65 100644 --- a/Lib/dis.py +++ b/Lib/dis.py @@ -11,6 +11,7 @@ _cache_format, _inline_cache_entries, _nb_ops, + _opsize, _specializations, _specialized_instructions, ) @@ -346,11 +347,12 @@ def get_instructions(x, *, first_line=None, show_caches=False, adaptive=False): line_offset = first_line - co.co_firstlineno else: line_offset = 0 + co_positions = _get_co_positions(co, show_caches=show_caches) return _get_instructions_bytes(_get_code_array(co, adaptive), co._varname_from_oparg, co.co_names, co.co_consts, linestarts, line_offset, - co_positions=co.co_positions(), + co_positions=co_positions, show_caches=show_caches) def _get_const_value(op, arg, co_consts): @@ -422,6 +424,25 @@ def _parse_exception_table(code): def _is_backward_jump(op): return 'JUMP_BACKWARD' in opname[op] +def _get_co_positions(code, show_caches=False): + # generate all co_positions, with or without caches, + # skipping the oparg2, oparg3 codewords. 
+ + if code is None: + return iter(()) + + ops = code.co_code[::2] + prev_op = 0 + for op, positions in zip(ops, code.co_positions()): + assert _opsize in (1, 2) + if _opsize == 2 and prev_op != 0: + # skip oparg2, oparg3 + prev_op = op + continue + if show_caches or op != CACHE: + yield positions + prev_op = op + def _get_instructions_bytes(code, varname_from_oparg=None, names=None, co_consts=None, linestarts=None, line_offset=0, @@ -476,7 +497,7 @@ def _get_instructions_bytes(code, varname_from_oparg=None, argrepr = "to " + repr(argval) elif deop in hasjrel: signed_arg = -arg if _is_backward_jump(deop) else arg - argval = offset + 2 + signed_arg*2 + argval = offset + (signed_arg + _opsize) * 2 if deop == FOR_ITER: argval += 2 argrepr = "to " + repr(argval) @@ -504,17 +525,13 @@ def _get_instructions_bytes(code, varname_from_oparg=None, if not caches: continue if not show_caches: - # We still need to advance the co_positions iterator: - for _ in range(caches): - next(co_positions, ()) continue for name, size in _cache_format[opname[deop]].items(): for i in range(size): offset += 2 # Only show the fancy argrepr for a CACHE instruction when it's - # the first entry for a particular cache value and the - # instruction using it is actually quickened: - if i == 0 and op != deop: + # the first entry for a particular cache value: + if i == 0: data = code[offset: offset + 2 * size] argrepr = f"{name}: {int.from_bytes(data, sys.byteorder)}" else: @@ -528,11 +545,12 @@ def disassemble(co, lasti=-1, *, file=None, show_caches=False, adaptive=False): """Disassemble a code object.""" linestarts = dict(findlinestarts(co)) exception_entries = _parse_exception_table(co) + co_positions = _get_co_positions(co, show_caches=show_caches) _disassemble_bytes(_get_code_array(co, adaptive), lasti, co._varname_from_oparg, co.co_names, co.co_consts, linestarts, file=file, exception_entries=exception_entries, - co_positions=co.co_positions(), show_caches=show_caches) + co_positions=co_positions, show_caches=show_caches) def _disassemble_recursive(co, *, file=None, depth=None, show_caches=False, adaptive=False): disassemble(co, file=file, show_caches=show_caches, adaptive=adaptive) @@ -610,6 +628,7 @@ def _unpack_opargs(code): op = code[i] deop = _deoptop(op) caches = _inline_cache_entries[deop] + caches += _opsize - 1 # also skip over oparg2, oparg3 if deop in hasarg: arg = code[i+1] | extended_arg extended_arg = (arg << 8) if deop == EXTENDED_ARG else 0 @@ -636,7 +655,7 @@ def findlabels(code): if deop in hasjrel: if _is_backward_jump(deop): arg = -arg - label = offset + 2 + arg*2 + label = offset + (arg + _opsize) * 2 if deop == FOR_ITER: label += 2 elif deop in hasjabs: @@ -723,13 +742,14 @@ def __init__(self, x, *, first_line=None, current_offset=None, show_caches=False def __iter__(self): co = self.codeobj + co_positions = _get_co_positions(co, show_caches=self.show_caches) return _get_instructions_bytes(_get_code_array(co, self.adaptive), co._varname_from_oparg, co.co_names, co.co_consts, self._linestarts, line_offset=self._line_offset, exception_entries=self.exception_entries, - co_positions=co.co_positions(), + co_positions=co_positions, show_caches=self.show_caches) def __repr__(self): @@ -757,6 +777,7 @@ def dis(self): else: offset = -1 with io.StringIO() as output: + co_positions=_get_co_positions(co, show_caches=self.show_caches) _disassemble_bytes(_get_code_array(co, self.adaptive), varname_from_oparg=co._varname_from_oparg, names=co.co_names, co_consts=co.co_consts, @@ -765,7 +786,7 @@ def dis(self): 
file=output, lasti=offset, exception_entries=self.exception_entries, - co_positions=co.co_positions(), + co_positions=co_positions, show_caches=self.show_caches) return output.getvalue() diff --git a/Lib/distutils/README b/Lib/distutils/README deleted file mode 100644 index 73bd25187c0270..00000000000000 --- a/Lib/distutils/README +++ /dev/null @@ -1,11 +0,0 @@ -This directory contains the Distutils package. - -There's a full documentation available at: - - https://docs.python.org/distutils/ - -The Distutils-SIG web page is also a good starting point: - - https://www.python.org/sigs/distutils-sig/ - -$Id$ diff --git a/Lib/distutils/__init__.py b/Lib/distutils/__init__.py deleted file mode 100644 index fdad6f65a78562..00000000000000 --- a/Lib/distutils/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -"""distutils - -The main package for the Python Module Distribution Utilities. Normally -used from a setup script as - - from distutils.core import setup - - setup (...) -""" - -import sys -import warnings - -__version__ = sys.version[:sys.version.index(' ')] - -_DEPRECATION_MESSAGE = ("The distutils package is deprecated and slated for " - "removal in Python 3.12. Use setuptools or check " - "PEP 632 for potential alternatives") -warnings.warn(_DEPRECATION_MESSAGE, - DeprecationWarning, 2) diff --git a/Lib/distutils/_msvccompiler.py b/Lib/distutils/_msvccompiler.py deleted file mode 100644 index af8099a4078192..00000000000000 --- a/Lib/distutils/_msvccompiler.py +++ /dev/null @@ -1,539 +0,0 @@ -"""distutils._msvccompiler - -Contains MSVCCompiler, an implementation of the abstract CCompiler class -for Microsoft Visual Studio 2015. - -The module is compatible with VS 2015 and later. You can find legacy support -for older versions in distutils.msvc9compiler and distutils.msvccompiler. -""" - -# Written by Perry Stoll -# hacked by Robin Becker and Thomas Heller to do a better job of -# finding DevStudio (through the registry) -# ported to VS 2005 and VS 2008 by Christian Heimes -# ported to VS 2015 by Steve Dower - -import os -import subprocess -import winreg - -from distutils.errors import DistutilsExecError, DistutilsPlatformError, \ - CompileError, LibError, LinkError -from distutils.ccompiler import CCompiler, gen_lib_options -from distutils import log -from distutils.util import get_platform - -from itertools import count - -def _find_vc2015(): - try: - key = winreg.OpenKeyEx( - winreg.HKEY_LOCAL_MACHINE, - r"Software\Microsoft\VisualStudio\SxS\VC7", - access=winreg.KEY_READ | winreg.KEY_WOW64_32KEY - ) - except OSError: - log.debug("Visual C++ is not registered") - return None, None - - best_version = 0 - best_dir = None - with key: - for i in count(): - try: - v, vc_dir, vt = winreg.EnumValue(key, i) - except OSError: - break - if v and vt == winreg.REG_SZ and os.path.isdir(vc_dir): - try: - version = int(float(v)) - except (ValueError, TypeError): - continue - if version >= 14 and version > best_version: - best_version, best_dir = version, vc_dir - return best_version, best_dir - -def _find_vc2017(): - """Returns "15, path" based on the result of invoking vswhere.exe - If no install is found, returns "None, None" - - The version is returned to avoid unnecessarily changing the function - result. It may be ignored when the path is not None. - - If vswhere.exe is not available, by definition, VS 2017 is not - installed. 
- """ - root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles") - if not root: - return None, None - - try: - path = subprocess.check_output([ - os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"), - "-latest", - "-prerelease", - "-requires", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", - "-property", "installationPath", - "-products", "*", - ], encoding="mbcs", errors="strict").strip() - except (subprocess.CalledProcessError, OSError, UnicodeDecodeError): - return None, None - - path = os.path.join(path, "VC", "Auxiliary", "Build") - if os.path.isdir(path): - return 15, path - - return None, None - -PLAT_SPEC_TO_RUNTIME = { - 'x86' : 'x86', - 'x86_amd64' : 'x64', - 'x86_arm' : 'arm', - 'x86_arm64' : 'arm64' -} - -def _find_vcvarsall(plat_spec): - # bpo-38597: Removed vcruntime return value - _, best_dir = _find_vc2017() - - if not best_dir: - best_version, best_dir = _find_vc2015() - - if not best_dir: - log.debug("No suitable Visual C++ version found") - return None, None - - vcvarsall = os.path.join(best_dir, "vcvarsall.bat") - if not os.path.isfile(vcvarsall): - log.debug("%s cannot be found", vcvarsall) - return None, None - - return vcvarsall, None - -def _get_vc_env(plat_spec): - if os.getenv("DISTUTILS_USE_SDK"): - return { - key.lower(): value - for key, value in os.environ.items() - } - - vcvarsall, _ = _find_vcvarsall(plat_spec) - if not vcvarsall: - raise DistutilsPlatformError("Unable to find vcvarsall.bat") - - try: - out = subprocess.check_output( - 'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec), - stderr=subprocess.STDOUT, - ).decode('utf-16le', errors='replace') - except subprocess.CalledProcessError as exc: - log.error(exc.output) - raise DistutilsPlatformError("Error executing {}" - .format(exc.cmd)) - - env = { - key.lower(): value - for key, _, value in - (line.partition('=') for line in out.splitlines()) - if key and value - } - - return env - -def _find_exe(exe, paths=None): - """Return path to an MSVC executable program. - - Tries to find the program in several places: first, one of the - MSVC program search paths from the registry; next, the directories - in the PATH environment variable. If any of those work, return an - absolute path that is known to exist. If none of them work, just - return the original program name, 'exe'. - """ - if not paths: - paths = os.getenv('path').split(os.pathsep) - for p in paths: - fn = os.path.join(os.path.abspath(p), exe) - if os.path.isfile(fn): - return fn - return exe - -# A map keyed by get_platform() return values to values accepted by -# 'vcvarsall.bat'. Always cross-compile from x86 to work with the -# lighter-weight MSVC installs that do not include native 64-bit tools. -PLAT_TO_VCVARS = { - 'win32' : 'x86', - 'win-amd64' : 'x86_amd64', - 'win-arm32' : 'x86_arm', - 'win-arm64' : 'x86_arm64' -} - -class MSVCCompiler(CCompiler) : - """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class.""" - - compiler_type = 'msvc' - - # Just set this so CCompiler's constructor doesn't barf. We currently - # don't use the 'set_executables()' bureaucracy provided by CCompiler, - # as it really isn't necessary for this sort of single-compiler class. - # Would be nice to have a consistent interface with UnixCCompiler, - # though, so it's worth thinking about. 
- executables = {} - - # Private class data (need to distinguish C from C++ source for compiler) - _c_extensions = ['.c'] - _cpp_extensions = ['.cc', '.cpp', '.cxx'] - _rc_extensions = ['.rc'] - _mc_extensions = ['.mc'] - - # Needed for the filename generation methods provided by the - # base class, CCompiler. - src_extensions = (_c_extensions + _cpp_extensions + - _rc_extensions + _mc_extensions) - res_extension = '.res' - obj_extension = '.obj' - static_lib_extension = '.lib' - shared_lib_extension = '.dll' - static_lib_format = shared_lib_format = '%s%s' - exe_extension = '.exe' - - - def __init__(self, verbose=0, dry_run=0, force=0): - CCompiler.__init__ (self, verbose, dry_run, force) - # target platform (.plat_name is consistent with 'bdist') - self.plat_name = None - self.initialized = False - - def initialize(self, plat_name=None): - # multi-init means we would need to check platform same each time... - assert not self.initialized, "don't init multiple times" - if plat_name is None: - plat_name = get_platform() - # sanity check for platforms to prevent obscure errors later. - if plat_name not in PLAT_TO_VCVARS: - raise DistutilsPlatformError("--plat-name must be one of {}" - .format(tuple(PLAT_TO_VCVARS))) - - # Get the vcvarsall.bat spec for the requested platform. - plat_spec = PLAT_TO_VCVARS[plat_name] - - vc_env = _get_vc_env(plat_spec) - if not vc_env: - raise DistutilsPlatformError("Unable to find a compatible " - "Visual Studio installation.") - - self._paths = vc_env.get('path', '') - paths = self._paths.split(os.pathsep) - self.cc = _find_exe("cl.exe", paths) - self.linker = _find_exe("link.exe", paths) - self.lib = _find_exe("lib.exe", paths) - self.rc = _find_exe("rc.exe", paths) # resource compiler - self.mc = _find_exe("mc.exe", paths) # message compiler - self.mt = _find_exe("mt.exe", paths) # message compiler - - for dir in vc_env.get('include', '').split(os.pathsep): - if dir: - self.add_include_dir(dir.rstrip(os.sep)) - - for dir in vc_env.get('lib', '').split(os.pathsep): - if dir: - self.add_library_dir(dir.rstrip(os.sep)) - - self.preprocess_options = None - # bpo-38597: Always compile with dynamic linking - # Future releases of Python 3.x will include all past - # versions of vcruntime*.dll for compatibility. 
- self.compile_options = [ - '/nologo', '/Ox', '/W3', '/GL', '/DNDEBUG', '/MD' - ] - - self.compile_options_debug = [ - '/nologo', '/Od', '/MDd', '/Zi', '/W3', '/D_DEBUG' - ] - - ldflags = [ - '/nologo', '/INCREMENTAL:NO', '/LTCG' - ] - - ldflags_debug = [ - '/nologo', '/INCREMENTAL:NO', '/LTCG', '/DEBUG:FULL' - ] - - self.ldflags_exe = [*ldflags, '/MANIFEST:EMBED,ID=1'] - self.ldflags_exe_debug = [*ldflags_debug, '/MANIFEST:EMBED,ID=1'] - self.ldflags_shared = [*ldflags, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO'] - self.ldflags_shared_debug = [*ldflags_debug, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO'] - self.ldflags_static = [*ldflags] - self.ldflags_static_debug = [*ldflags_debug] - - self._ldflags = { - (CCompiler.EXECUTABLE, None): self.ldflags_exe, - (CCompiler.EXECUTABLE, False): self.ldflags_exe, - (CCompiler.EXECUTABLE, True): self.ldflags_exe_debug, - (CCompiler.SHARED_OBJECT, None): self.ldflags_shared, - (CCompiler.SHARED_OBJECT, False): self.ldflags_shared, - (CCompiler.SHARED_OBJECT, True): self.ldflags_shared_debug, - (CCompiler.SHARED_LIBRARY, None): self.ldflags_static, - (CCompiler.SHARED_LIBRARY, False): self.ldflags_static, - (CCompiler.SHARED_LIBRARY, True): self.ldflags_static_debug, - } - - self.initialized = True - - # -- Worker methods ------------------------------------------------ - - def object_filenames(self, - source_filenames, - strip_dir=0, - output_dir=''): - ext_map = { - **{ext: self.obj_extension for ext in self.src_extensions}, - **{ext: self.res_extension for ext in self._rc_extensions + self._mc_extensions}, - } - - output_dir = output_dir or '' - - def make_out_path(p): - base, ext = os.path.splitext(p) - if strip_dir: - base = os.path.basename(base) - else: - _, base = os.path.splitdrive(base) - if base.startswith((os.path.sep, os.path.altsep)): - base = base[1:] - try: - # XXX: This may produce absurdly long paths. We should check - # the length of the result and trim base until we fit within - # 260 characters. 
- return os.path.join(output_dir, base + ext_map[ext]) - except LookupError: - # Better to raise an exception instead of silently continuing - # and later complain about sources and targets having - # different lengths - raise CompileError("Don't know how to compile {}".format(p)) - - return list(map(make_out_path, source_filenames)) - - - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, - extra_preargs=None, extra_postargs=None, depends=None): - - if not self.initialized: - self.initialize() - compile_info = self._setup_compile(output_dir, macros, include_dirs, - sources, depends, extra_postargs) - macros, objects, extra_postargs, pp_opts, build = compile_info - - compile_opts = extra_preargs or [] - compile_opts.append('/c') - if debug: - compile_opts.extend(self.compile_options_debug) - else: - compile_opts.extend(self.compile_options) - - - add_cpp_opts = False - - for obj in objects: - try: - src, ext = build[obj] - except KeyError: - continue - if debug: - # pass the full pathname to MSVC in debug mode, - # this allows the debugger to find the source file - # without asking the user to browse for it - src = os.path.abspath(src) - - if ext in self._c_extensions: - input_opt = "/Tc" + src - elif ext in self._cpp_extensions: - input_opt = "/Tp" + src - add_cpp_opts = True - elif ext in self._rc_extensions: - # compile .RC to .RES file - input_opt = src - output_opt = "/fo" + obj - try: - self.spawn([self.rc] + pp_opts + [output_opt, input_opt]) - except DistutilsExecError as msg: - raise CompileError(msg) - continue - elif ext in self._mc_extensions: - # Compile .MC to .RC file to .RES file. - # * '-h dir' specifies the directory for the - # generated include file - # * '-r dir' specifies the target directory of the - # generated RC file and the binary message resource - # it includes - # - # For now (since there are no options to change this), - # we use the source-directory for the include file and - # the build directory for the RC file and message - # resources. This works at least for win32all. - h_dir = os.path.dirname(src) - rc_dir = os.path.dirname(obj) - try: - # first compile .MC to .RC and .H file - self.spawn([self.mc, '-h', h_dir, '-r', rc_dir, src]) - base, _ = os.path.splitext(os.path.basename (src)) - rc_file = os.path.join(rc_dir, base + '.rc') - # then compile .RC to .RES file - self.spawn([self.rc, "/fo" + obj, rc_file]) - - except DistutilsExecError as msg: - raise CompileError(msg) - continue - else: - # how to handle this file? - raise CompileError("Don't know how to compile {} to {}" - .format(src, obj)) - - args = [self.cc] + compile_opts + pp_opts - if add_cpp_opts: - args.append('/EHsc') - args.append(input_opt) - args.append("/Fo" + obj) - args.extend(extra_postargs) - - try: - self.spawn(args) - except DistutilsExecError as msg: - raise CompileError(msg) - - return objects - - - def create_static_lib(self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): - - if not self.initialized: - self.initialize() - objects, output_dir = self._fix_object_args(objects, output_dir) - output_filename = self.library_filename(output_libname, - output_dir=output_dir) - - if self._need_link(objects, output_filename): - lib_args = objects + ['/OUT:' + output_filename] - if debug: - pass # XXX what goes here? 
- try: - log.debug('Executing "%s" %s', self.lib, ' '.join(lib_args)) - self.spawn([self.lib] + lib_args) - except DistutilsExecError as msg: - raise LibError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - - def link(self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - - if not self.initialized: - self.initialize() - objects, output_dir = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, - runtime_library_dirs) - libraries, library_dirs, runtime_library_dirs = fixed_args - - if runtime_library_dirs: - self.warn("I don't know what to do with 'runtime_library_dirs': " - + str(runtime_library_dirs)) - - lib_opts = gen_lib_options(self, - library_dirs, runtime_library_dirs, - libraries) - if output_dir is not None: - output_filename = os.path.join(output_dir, output_filename) - - if self._need_link(objects, output_filename): - ldflags = self._ldflags[target_desc, debug] - - export_opts = ["/EXPORT:" + sym for sym in (export_symbols or [])] - - ld_args = (ldflags + lib_opts + export_opts + - objects + ['/OUT:' + output_filename]) - - # The MSVC linker generates .lib and .exp files, which cannot be - # suppressed by any linker switches. The .lib files may even be - # needed! Make sure they are generated in the temporary build - # directory. Since they have different names for debug and release - # builds, they can go into the same directory. - build_temp = os.path.dirname(objects[0]) - if export_symbols is not None: - (dll_name, dll_ext) = os.path.splitext( - os.path.basename(output_filename)) - implib_file = os.path.join( - build_temp, - self.library_filename(dll_name)) - ld_args.append ('/IMPLIB:' + implib_file) - - if extra_preargs: - ld_args[:0] = extra_preargs - if extra_postargs: - ld_args.extend(extra_postargs) - - output_dir = os.path.dirname(os.path.abspath(output_filename)) - self.mkpath(output_dir) - try: - log.debug('Executing "%s" %s', self.linker, ' '.join(ld_args)) - self.spawn([self.linker] + ld_args) - except DistutilsExecError as msg: - raise LinkError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - def spawn(self, cmd): - old_path = os.getenv('path') - try: - os.environ['path'] = self._paths - return super().spawn(cmd) - finally: - os.environ['path'] = old_path - - # -- Miscellaneous methods ----------------------------------------- - # These are all used by the 'gen_lib_options() function, in - # ccompiler.py. - - def library_dir_option(self, dir): - return "/LIBPATH:" + dir - - def runtime_library_dir_option(self, dir): - raise DistutilsPlatformError( - "don't know how to set runtime library search path for MSVC") - - def library_option(self, lib): - return self.library_filename(lib) - - def find_library_file(self, dirs, lib, debug=0): - # Prefer a debugging library if found (and requested), but deal - # with it if we don't have one. 
- if debug: - try_names = [lib + "_d", lib] - else: - try_names = [lib] - for dir in dirs: - for name in try_names: - libfile = os.path.join(dir, self.library_filename(name)) - if os.path.isfile(libfile): - return libfile - else: - # Oops, didn't find it in *any* of 'dirs' - return None diff --git a/Lib/distutils/archive_util.py b/Lib/distutils/archive_util.py deleted file mode 100644 index 565a3117b4b5e1..00000000000000 --- a/Lib/distutils/archive_util.py +++ /dev/null @@ -1,256 +0,0 @@ -"""distutils.archive_util - -Utility functions for creating archive files (tarballs, zip files, -that sort of thing).""" - -import os -from warnings import warn -import sys - -try: - import zipfile -except ImportError: - zipfile = None - - -from distutils.errors import DistutilsExecError -from distutils.spawn import spawn -from distutils.dir_util import mkpath -from distutils import log - -try: - from pwd import getpwnam -except ImportError: - getpwnam = None - -try: - from grp import getgrnam -except ImportError: - getgrnam = None - -def _get_gid(name): - """Returns a gid, given a group name.""" - if getgrnam is None or name is None: - return None - try: - result = getgrnam(name) - except KeyError: - result = None - if result is not None: - return result[2] - return None - -def _get_uid(name): - """Returns an uid, given a user name.""" - if getpwnam is None or name is None: - return None - try: - result = getpwnam(name) - except KeyError: - result = None - if result is not None: - return result[2] - return None - -def make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, - owner=None, group=None): - """Create a (possibly compressed) tar file from all the files under - 'base_dir'. - - 'compress' must be "gzip" (the default), "bzip2", "xz", "compress", or - None. ("compress" will be deprecated in Python 3.2) - - 'owner' and 'group' can be used to define an owner and a group for the - archive that is being built. If not provided, the current owner and group - will be used. - - The output tar file will be named 'base_dir' + ".tar", possibly plus - the appropriate compression extension (".gz", ".bz2", ".xz" or ".Z"). - - Returns the output filename. 
- """ - tar_compression = {'gzip': 'gz', 'bzip2': 'bz2', 'xz': 'xz', None: '', - 'compress': ''} - compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz', - 'compress': '.Z'} - - # flags for compression program, each element of list will be an argument - if compress is not None and compress not in compress_ext.keys(): - raise ValueError( - "bad value for 'compress': must be None, 'gzip', 'bzip2', " - "'xz' or 'compress'") - - archive_name = base_name + '.tar' - if compress != 'compress': - archive_name += compress_ext.get(compress, '') - - mkpath(os.path.dirname(archive_name), dry_run=dry_run) - - # creating the tarball - import tarfile # late import so Python build itself doesn't break - - log.info('Creating tar archive') - - uid = _get_uid(owner) - gid = _get_gid(group) - - def _set_uid_gid(tarinfo): - if gid is not None: - tarinfo.gid = gid - tarinfo.gname = group - if uid is not None: - tarinfo.uid = uid - tarinfo.uname = owner - return tarinfo - - if not dry_run: - tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress]) - try: - tar.add(base_dir, filter=_set_uid_gid) - finally: - tar.close() - - # compression using `compress` - if compress == 'compress': - warn("'compress' will be deprecated.", PendingDeprecationWarning) - # the option varies depending on the platform - compressed_name = archive_name + compress_ext[compress] - if sys.platform == 'win32': - cmd = [compress, archive_name, compressed_name] - else: - cmd = [compress, '-f', archive_name] - spawn(cmd, dry_run=dry_run) - return compressed_name - - return archive_name - -def make_zipfile(base_name, base_dir, verbose=0, dry_run=0): - """Create a zip file from all the files under 'base_dir'. - - The output zip file will be named 'base_name' + ".zip". Uses either the - "zipfile" Python module (if available) or the InfoZIP "zip" utility - (if installed and found on the default search path). If neither tool is - available, raises DistutilsExecError. Returns the name of the output zip - file. - """ - zip_filename = base_name + ".zip" - mkpath(os.path.dirname(zip_filename), dry_run=dry_run) - - # If zipfile module is not available, try spawning an external - # 'zip' command. - if zipfile is None: - if verbose: - zipoptions = "-r" - else: - zipoptions = "-rq" - - try: - spawn(["zip", zipoptions, zip_filename, base_dir], - dry_run=dry_run) - except DistutilsExecError: - # XXX really should distinguish between "couldn't find - # external 'zip' command" and "zip failed". 
- raise DistutilsExecError(("unable to create zip file '%s': " - "could neither import the 'zipfile' module nor " - "find a standalone zip utility") % zip_filename) - - else: - log.info("creating '%s' and adding '%s' to it", - zip_filename, base_dir) - - if not dry_run: - try: - zip = zipfile.ZipFile(zip_filename, "w", - compression=zipfile.ZIP_DEFLATED) - except RuntimeError: - zip = zipfile.ZipFile(zip_filename, "w", - compression=zipfile.ZIP_STORED) - - with zip: - if base_dir != os.curdir: - path = os.path.normpath(os.path.join(base_dir, '')) - zip.write(path, path) - log.info("adding '%s'", path) - for dirpath, dirnames, filenames in os.walk(base_dir): - for name in dirnames: - path = os.path.normpath(os.path.join(dirpath, name, '')) - zip.write(path, path) - log.info("adding '%s'", path) - for name in filenames: - path = os.path.normpath(os.path.join(dirpath, name)) - if os.path.isfile(path): - zip.write(path, path) - log.info("adding '%s'", path) - - return zip_filename - -ARCHIVE_FORMATS = { - 'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"), - 'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"), - 'xztar': (make_tarball, [('compress', 'xz')], "xz'ed tar-file"), - 'ztar': (make_tarball, [('compress', 'compress')], "compressed tar file"), - 'tar': (make_tarball, [('compress', None)], "uncompressed tar file"), - 'zip': (make_zipfile, [],"ZIP file") - } - -def check_archive_formats(formats): - """Returns the first format from the 'format' list that is unknown. - - If all formats are known, returns None - """ - for format in formats: - if format not in ARCHIVE_FORMATS: - return format - return None - -def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0, - dry_run=0, owner=None, group=None): - """Create an archive file (eg. zip or tar). - - 'base_name' is the name of the file to create, minus any format-specific - extension; 'format' is the archive format: one of "zip", "tar", "gztar", - "bztar", "xztar", or "ztar". - - 'root_dir' is a directory that will be the root directory of the - archive; ie. we typically chdir into 'root_dir' before creating the - archive. 'base_dir' is the directory where we start archiving from; - ie. 'base_dir' will be the common prefix of all files and - directories in the archive. 'root_dir' and 'base_dir' both default - to the current directory. Returns the name of the archive file. - - 'owner' and 'group' are used when creating a tar archive. By default, - uses the current owner and group. 
- """ - save_cwd = os.getcwd() - if root_dir is not None: - log.debug("changing into '%s'", root_dir) - base_name = os.path.abspath(base_name) - if not dry_run: - os.chdir(root_dir) - - if base_dir is None: - base_dir = os.curdir - - kwargs = {'dry_run': dry_run} - - try: - format_info = ARCHIVE_FORMATS[format] - except KeyError: - raise ValueError("unknown archive format '%s'" % format) - - func = format_info[0] - for arg, val in format_info[1]: - kwargs[arg] = val - - if format != 'zip': - kwargs['owner'] = owner - kwargs['group'] = group - - try: - filename = func(base_name, base_dir, **kwargs) - finally: - if root_dir is not None: - log.debug("changing back to '%s'", save_cwd) - os.chdir(save_cwd) - - return filename diff --git a/Lib/distutils/bcppcompiler.py b/Lib/distutils/bcppcompiler.py deleted file mode 100644 index 071fea5d038cb0..00000000000000 --- a/Lib/distutils/bcppcompiler.py +++ /dev/null @@ -1,393 +0,0 @@ -"""distutils.bcppcompiler - -Contains BorlandCCompiler, an implementation of the abstract CCompiler class -for the Borland C++ compiler. -""" - -# This implementation by Lyle Johnson, based on the original msvccompiler.py -# module and using the directions originally published by Gordon Williams. - -# XXX looks like there's a LOT of overlap between these two classes: -# someone should sit down and factor out the common code as -# WindowsCCompiler! --GPW - - -import os -from distutils.errors import \ - DistutilsExecError, \ - CompileError, LibError, LinkError, UnknownFileError -from distutils.ccompiler import \ - CCompiler, gen_preprocess_options -from distutils.file_util import write_file -from distutils.dep_util import newer -from distutils import log - -class BCPPCompiler(CCompiler) : - """Concrete class that implements an interface to the Borland C/C++ - compiler, as defined by the CCompiler abstract class. - """ - - compiler_type = 'bcpp' - - # Just set this so CCompiler's constructor doesn't barf. We currently - # don't use the 'set_executables()' bureaucracy provided by CCompiler, - # as it really isn't necessary for this sort of single-compiler class. - # Would be nice to have a consistent interface with UnixCCompiler, - # though, so it's worth thinking about. - executables = {} - - # Private class data (need to distinguish C from C++ source for compiler) - _c_extensions = ['.c'] - _cpp_extensions = ['.cc', '.cpp', '.cxx'] - - # Needed for the filename generation methods provided by the - # base class, CCompiler. - src_extensions = _c_extensions + _cpp_extensions - obj_extension = '.obj' - static_lib_extension = '.lib' - shared_lib_extension = '.dll' - static_lib_format = shared_lib_format = '%s%s' - exe_extension = '.exe' - - - def __init__ (self, - verbose=0, - dry_run=0, - force=0): - - CCompiler.__init__ (self, verbose, dry_run, force) - - # These executables are assumed to all be in the path. - # Borland doesn't seem to use any special registry settings to - # indicate their installation locations. 
- - self.cc = "bcc32.exe" - self.linker = "ilink32.exe" - self.lib = "tlib.exe" - - self.preprocess_options = None - self.compile_options = ['/tWM', '/O2', '/q', '/g0'] - self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0'] - - self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x'] - self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x'] - self.ldflags_static = [] - self.ldflags_exe = ['/Gn', '/q', '/x'] - self.ldflags_exe_debug = ['/Gn', '/q', '/x','/r'] - - - # -- Worker methods ------------------------------------------------ - - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, - extra_preargs=None, extra_postargs=None, depends=None): - - macros, objects, extra_postargs, pp_opts, build = \ - self._setup_compile(output_dir, macros, include_dirs, sources, - depends, extra_postargs) - compile_opts = extra_preargs or [] - compile_opts.append ('-c') - if debug: - compile_opts.extend (self.compile_options_debug) - else: - compile_opts.extend (self.compile_options) - - for obj in objects: - try: - src, ext = build[obj] - except KeyError: - continue - # XXX why do the normpath here? - src = os.path.normpath(src) - obj = os.path.normpath(obj) - # XXX _setup_compile() did a mkpath() too but before the normpath. - # Is it possible to skip the normpath? - self.mkpath(os.path.dirname(obj)) - - if ext == '.res': - # This is already a binary file -- skip it. - continue # the 'for' loop - if ext == '.rc': - # This needs to be compiled to a .res file -- do it now. - try: - self.spawn (["brcc32", "-fo", obj, src]) - except DistutilsExecError as msg: - raise CompileError(msg) - continue # the 'for' loop - - # The next two are both for the real compiler. - if ext in self._c_extensions: - input_opt = "" - elif ext in self._cpp_extensions: - input_opt = "-P" - else: - # Unknown file type -- no extra options. The compiler - # will probably fail, but let it just in case this is a - # file the compiler recognizes even if we don't. - input_opt = "" - - output_opt = "-o" + obj - - # Compiler command line syntax is: "bcc32 [options] file(s)". - # Note that the source file names must appear at the end of - # the command line. - try: - self.spawn ([self.cc] + compile_opts + pp_opts + - [input_opt, output_opt] + - extra_postargs + [src]) - except DistutilsExecError as msg: - raise CompileError(msg) - - return objects - - # compile () - - - def create_static_lib (self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): - - (objects, output_dir) = self._fix_object_args (objects, output_dir) - output_filename = \ - self.library_filename (output_libname, output_dir=output_dir) - - if self._need_link (objects, output_filename): - lib_args = [output_filename, '/u'] + objects - if debug: - pass # XXX what goes here? - try: - self.spawn ([self.lib] + lib_args) - except DistutilsExecError as msg: - raise LibError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - # create_static_lib () - - - def link (self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - - # XXX this ignores 'build_temp'! 
should follow the lead of - # msvccompiler.py - - (objects, output_dir) = self._fix_object_args (objects, output_dir) - (libraries, library_dirs, runtime_library_dirs) = \ - self._fix_lib_args (libraries, library_dirs, runtime_library_dirs) - - if runtime_library_dirs: - log.warn("I don't know what to do with 'runtime_library_dirs': %s", - str(runtime_library_dirs)) - - if output_dir is not None: - output_filename = os.path.join (output_dir, output_filename) - - if self._need_link (objects, output_filename): - - # Figure out linker args based on type of target. - if target_desc == CCompiler.EXECUTABLE: - startup_obj = 'c0w32' - if debug: - ld_args = self.ldflags_exe_debug[:] - else: - ld_args = self.ldflags_exe[:] - else: - startup_obj = 'c0d32' - if debug: - ld_args = self.ldflags_shared_debug[:] - else: - ld_args = self.ldflags_shared[:] - - - # Create a temporary exports file for use by the linker - if export_symbols is None: - def_file = '' - else: - head, tail = os.path.split (output_filename) - modname, ext = os.path.splitext (tail) - temp_dir = os.path.dirname(objects[0]) # preserve tree structure - def_file = os.path.join (temp_dir, '%s.def' % modname) - contents = ['EXPORTS'] - for sym in (export_symbols or []): - contents.append(' %s=_%s' % (sym, sym)) - self.execute(write_file, (def_file, contents), - "writing %s" % def_file) - - # Borland C++ has problems with '/' in paths - objects2 = map(os.path.normpath, objects) - # split objects in .obj and .res files - # Borland C++ needs them at different positions in the command line - objects = [startup_obj] - resources = [] - for file in objects2: - (base, ext) = os.path.splitext(os.path.normcase(file)) - if ext == '.res': - resources.append(file) - else: - objects.append(file) - - - for l in library_dirs: - ld_args.append("/L%s" % os.path.normpath(l)) - ld_args.append("/L.") # we sometimes use relative paths - - # list of object files - ld_args.extend(objects) - - # XXX the command-line syntax for Borland C++ is a bit wonky; - # certain filenames are jammed together in one big string, but - # comma-delimited. This doesn't mesh too well with the - # Unix-centric attitude (with a DOS/Windows quoting hack) of - # 'spawn()', so constructing the argument list is a bit - # awkward. Note that doing the obvious thing and jamming all - # the filenames and commas into one argument would be wrong, - # because 'spawn()' would quote any filenames with spaces in - # them. Arghghh!. Apparently it works fine as coded... 
- - # name of dll/exe file - ld_args.extend([',',output_filename]) - # no map file and start libraries - ld_args.append(',,') - - for lib in libraries: - # see if we find it and if there is a bcpp specific lib - # (xxx_bcpp.lib) - libfile = self.find_library_file(library_dirs, lib, debug) - if libfile is None: - ld_args.append(lib) - # probably a BCPP internal library -- don't warn - else: - # full name which prefers bcpp_xxx.lib over xxx.lib - ld_args.append(libfile) - - # some default libraries - ld_args.append ('import32') - ld_args.append ('cw32mt') - - # def file for export symbols - ld_args.extend([',',def_file]) - # add resource files - ld_args.append(',') - ld_args.extend(resources) - - - if extra_preargs: - ld_args[:0] = extra_preargs - if extra_postargs: - ld_args.extend(extra_postargs) - - self.mkpath (os.path.dirname (output_filename)) - try: - self.spawn ([self.linker] + ld_args) - except DistutilsExecError as msg: - raise LinkError(msg) - - else: - log.debug("skipping %s (up-to-date)", output_filename) - - # link () - - # -- Miscellaneous methods ----------------------------------------- - - - def find_library_file (self, dirs, lib, debug=0): - # List of effective library names to try, in order of preference: - # xxx_bcpp.lib is better than xxx.lib - # and xxx_d.lib is better than xxx.lib if debug is set - # - # The "_bcpp" suffix is to handle a Python installation for people - # with multiple compilers (primarily Distutils hackers, I suspect - # ;-). The idea is they'd have one static library for each - # compiler they care about, since (almost?) every Windows compiler - # seems to have a different format for static libraries. - if debug: - dlib = (lib + "_d") - try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib) - else: - try_names = (lib + "_bcpp", lib) - - for dir in dirs: - for name in try_names: - libfile = os.path.join(dir, self.library_filename(name)) - if os.path.exists(libfile): - return libfile - else: - # Oops, didn't find it in *any* of 'dirs' - return None - - # overwrite the one from CCompiler to support rc and res-files - def object_filenames (self, - source_filenames, - strip_dir=0, - output_dir=''): - if output_dir is None: output_dir = '' - obj_names = [] - for src_name in source_filenames: - # use normcase to make sure '.rc' is really '.rc' and not '.RC' - (base, ext) = os.path.splitext (os.path.normcase(src_name)) - if ext not in (self.src_extensions + ['.rc','.res']): - raise UnknownFileError("unknown file type '%s' (from '%s')" % \ - (ext, src_name)) - if strip_dir: - base = os.path.basename (base) - if ext == '.res': - # these can go unchanged - obj_names.append (os.path.join (output_dir, base + ext)) - elif ext == '.rc': - # these need to be compiled to .res-files - obj_names.append (os.path.join (output_dir, base + '.res')) - else: - obj_names.append (os.path.join (output_dir, - base + self.obj_extension)) - return obj_names - - # object_filenames () - - def preprocess (self, - source, - output_file=None, - macros=None, - include_dirs=None, - extra_preargs=None, - extra_postargs=None): - - (_, macros, include_dirs) = \ - self._fix_compile_args(None, macros, include_dirs) - pp_opts = gen_preprocess_options(macros, include_dirs) - pp_args = ['cpp32.exe'] + pp_opts - if output_file is not None: - pp_args.append('-o' + output_file) - if extra_preargs: - pp_args[:0] = extra_preargs - if extra_postargs: - pp_args.extend(extra_postargs) - pp_args.append(source) - - # We need to preprocess: either we're being forced to, or the - # source file is newer 
than the target (or the target doesn't - # exist). - if self.force or output_file is None or newer(source, output_file): - if output_file: - self.mkpath(os.path.dirname(output_file)) - try: - self.spawn(pp_args) - except DistutilsExecError as msg: - print(msg) - raise CompileError(msg) - - # preprocess() diff --git a/Lib/distutils/ccompiler.py b/Lib/distutils/ccompiler.py deleted file mode 100644 index 4c47f2ed245d4f..00000000000000 --- a/Lib/distutils/ccompiler.py +++ /dev/null @@ -1,1116 +0,0 @@ -"""distutils.ccompiler - -Contains CCompiler, an abstract base class that defines the interface -for the Distutils compiler abstraction model.""" - -import sys, os, re -from distutils.errors import * -from distutils.spawn import spawn -from distutils.file_util import move_file -from distutils.dir_util import mkpath -from distutils.dep_util import newer_group -from distutils.util import split_quoted, execute -from distutils import log - -class CCompiler: - """Abstract base class to define the interface that must be implemented - by real compiler classes. Also has some utility methods used by - several compiler classes. - - The basic idea behind a compiler abstraction class is that each - instance can be used for all the compile/link steps in building a - single project. Thus, attributes common to all of those compile and - link steps -- include directories, macros to define, libraries to link - against, etc. -- are attributes of the compiler instance. To allow for - variability in how individual files are treated, most of those - attributes may be varied on a per-compilation or per-link basis. - """ - - # 'compiler_type' is a class attribute that identifies this class. It - # keeps code that wants to know what kind of compiler it's dealing with - # from having to import all possible compiler classes just to do an - # 'isinstance'. In concrete CCompiler subclasses, 'compiler_type' - # should really, really be one of the keys of the 'compiler_class' - # dictionary (see below -- used by the 'new_compiler()' factory - # function) -- authors of new compiler interface classes are - # responsible for updating 'compiler_class'! - compiler_type = None - - # XXX things not handled by this compiler abstraction model: - # * client can't provide additional options for a compiler, - # e.g. warning, optimization, debugging flags. Perhaps this - # should be the domain of concrete compiler abstraction classes - # (UnixCCompiler, MSVCCompiler, etc.) -- or perhaps the base - # class should have methods for the common ones. - # * can't completely override the include or library searchg - # path, ie. no "cc -I -Idir1 -Idir2" or "cc -L -Ldir1 -Ldir2". - # I'm not sure how widely supported this is even by Unix - # compilers, much less on other platforms. And I'm even less - # sure how useful it is; maybe for cross-compiling, but - # support for that is a ways off. (And anyways, cross - # compilers probably have a dedicated binary with the - # right paths compiled in. I hope.) - # * can't do really freaky things with the library list/library - # dirs, e.g. "-Ldir1 -lfoo -Ldir2 -lfoo" to link against - # different versions of libfoo.a in different locations. I - # think this is useless without the ability to null out the - # library search path anyways. - - - # Subclasses that rely on the standard filename generation methods - # implemented below should override these; see the comment near - # those methods ('object_filenames()' et. al.) 
for details: - src_extensions = None # list of strings - obj_extension = None # string - static_lib_extension = None - shared_lib_extension = None # string - static_lib_format = None # format string - shared_lib_format = None # prob. same as static_lib_format - exe_extension = None # string - - # Default language settings. language_map is used to detect a source - # file or Extension target language, checking source filenames. - # language_order is used to detect the language precedence, when deciding - # what language to use when mixing source types. For example, if some - # extension has two files with ".c" extension, and one with ".cpp", it - # is still linked as c++. - language_map = {".c" : "c", - ".cc" : "c++", - ".cpp" : "c++", - ".cxx" : "c++", - ".m" : "objc", - } - language_order = ["c++", "objc", "c"] - - def __init__(self, verbose=0, dry_run=0, force=0): - self.dry_run = dry_run - self.force = force - self.verbose = verbose - - # 'output_dir': a common output directory for object, library, - # shared object, and shared library files - self.output_dir = None - - # 'macros': a list of macro definitions (or undefinitions). A - # macro definition is a 2-tuple (name, value), where the value is - # either a string or None (no explicit value). A macro - # undefinition is a 1-tuple (name,). - self.macros = [] - - # 'include_dirs': a list of directories to search for include files - self.include_dirs = [] - - # 'libraries': a list of libraries to include in any link - # (library names, not filenames: eg. "foo" not "libfoo.a") - self.libraries = [] - - # 'library_dirs': a list of directories to search for libraries - self.library_dirs = [] - - # 'runtime_library_dirs': a list of directories to search for - # shared libraries/objects at runtime - self.runtime_library_dirs = [] - - # 'objects': a list of object files (or similar, such as explicitly - # named library files) to include on any link - self.objects = [] - - for key in self.executables.keys(): - self.set_executable(key, self.executables[key]) - - def set_executables(self, **kwargs): - """Define the executables (and options for them) that will be run - to perform the various stages of compilation. The exact set of - executables that may be specified here depends on the compiler - class (via the 'executables' class attribute), but most will have: - compiler the C/C++ compiler - linker_so linker used to create shared objects and libraries - linker_exe linker used to create binary executables - archiver static library creator - - On platforms with a command-line (Unix, DOS/Windows), each of these - is a string that will be split into executable name and (optional) - list of arguments. (Splitting the string is done similarly to how - Unix shells operate: words are delimited by spaces, but quotes and - backslashes can override this. See - 'distutils.util.split_quoted()'.) - """ - - # Note that some CCompiler implementation classes will define class - # attributes 'cpp', 'cc', etc. with hard-coded executable names; - # this is appropriate when a compiler class is for exactly one - # compiler/OS combination (eg. MSVCCompiler). Other compiler - # classes (UnixCCompiler, in particular) are driven by information - # discovered at run-time, since there are many different ways to do - # basically the same things with Unix C compilers. 
- - for key in kwargs: - if key not in self.executables: - raise ValueError("unknown executable '%s' for class %s" % - (key, self.__class__.__name__)) - self.set_executable(key, kwargs[key]) - - def set_executable(self, key, value): - if isinstance(value, str): - setattr(self, key, split_quoted(value)) - else: - setattr(self, key, value) - - def _find_macro(self, name): - i = 0 - for defn in self.macros: - if defn[0] == name: - return i - i += 1 - return None - - def _check_macro_definitions(self, definitions): - """Ensures that every element of 'definitions' is a valid macro - definition, ie. either (name,value) 2-tuple or a (name,) tuple. Do - nothing if all definitions are OK, raise TypeError otherwise. - """ - for defn in definitions: - if not (isinstance(defn, tuple) and - (len(defn) in (1, 2) and - (isinstance (defn[1], str) or defn[1] is None)) and - isinstance (defn[0], str)): - raise TypeError(("invalid macro definition '%s': " % defn) + \ - "must be tuple (string,), (string, string), or " + \ - "(string, None)") - - - # -- Bookkeeping methods ------------------------------------------- - - def define_macro(self, name, value=None): - """Define a preprocessor macro for all compilations driven by this - compiler object. The optional parameter 'value' should be a - string; if it is not supplied, then the macro will be defined - without an explicit value and the exact outcome depends on the - compiler used (XXX true? does ANSI say anything about this?) - """ - # Delete from the list of macro definitions/undefinitions if - # already there (so that this one will take precedence). - i = self._find_macro (name) - if i is not None: - del self.macros[i] - - self.macros.append((name, value)) - - def undefine_macro(self, name): - """Undefine a preprocessor macro for all compilations driven by - this compiler object. If the same macro is defined by - 'define_macro()' and undefined by 'undefine_macro()' the last call - takes precedence (including multiple redefinitions or - undefinitions). If the macro is redefined/undefined on a - per-compilation basis (ie. in the call to 'compile()'), then that - takes precedence. - """ - # Delete from the list of macro definitions/undefinitions if - # already there (so that this one will take precedence). - i = self._find_macro (name) - if i is not None: - del self.macros[i] - - undefn = (name,) - self.macros.append(undefn) - - def add_include_dir(self, dir): - """Add 'dir' to the list of directories that will be searched for - header files. The compiler is instructed to search directories in - the order in which they are supplied by successive calls to - 'add_include_dir()'. - """ - self.include_dirs.append(dir) - - def set_include_dirs(self, dirs): - """Set the list of directories that will be searched to 'dirs' (a - list of strings). Overrides any preceding calls to - 'add_include_dir()'; subsequence calls to 'add_include_dir()' add - to the list passed to 'set_include_dirs()'. This does not affect - any list of standard include directories that the compiler may - search by default. - """ - self.include_dirs = dirs[:] - - def add_library(self, libname): - """Add 'libname' to the list of libraries that will be included in - all links driven by this compiler object. Note that 'libname' - should *not* be the name of a file containing a library, but the - name of the library itself: the actual filename will be inferred by - the linker, the compiler, or the compiler class (depending on the - platform). 
- - The linker will be instructed to link against libraries in the - order they were supplied to 'add_library()' and/or - 'set_libraries()'. It is perfectly valid to duplicate library - names; the linker will be instructed to link against libraries as - many times as they are mentioned. - """ - self.libraries.append(libname) - - def set_libraries(self, libnames): - """Set the list of libraries to be included in all links driven by - this compiler object to 'libnames' (a list of strings). This does - not affect any standard system libraries that the linker may - include by default. - """ - self.libraries = libnames[:] - - def add_library_dir(self, dir): - """Add 'dir' to the list of directories that will be searched for - libraries specified to 'add_library()' and 'set_libraries()'. The - linker will be instructed to search for libraries in the order they - are supplied to 'add_library_dir()' and/or 'set_library_dirs()'. - """ - self.library_dirs.append(dir) - - def set_library_dirs(self, dirs): - """Set the list of library search directories to 'dirs' (a list of - strings). This does not affect any standard library search path - that the linker may search by default. - """ - self.library_dirs = dirs[:] - - def add_runtime_library_dir(self, dir): - """Add 'dir' to the list of directories that will be searched for - shared libraries at runtime. - """ - self.runtime_library_dirs.append(dir) - - def set_runtime_library_dirs(self, dirs): - """Set the list of directories to search for shared libraries at - runtime to 'dirs' (a list of strings). This does not affect any - standard search path that the runtime linker may search by - default. - """ - self.runtime_library_dirs = dirs[:] - - def add_link_object(self, object): - """Add 'object' to the list of object files (or analogues, such as - explicitly named library files or the output of "resource - compilers") to be included in every link driven by this compiler - object. - """ - self.objects.append(object) - - def set_link_objects(self, objects): - """Set the list of object files (or analogues) to be included in - every link to 'objects'. This does not affect any standard object - files that the linker may include by default (such as system - libraries). 
- """ - self.objects = objects[:] - - - # -- Private utility methods -------------------------------------- - # (here for the convenience of subclasses) - - # Helper method to prep compiler in subclass compile() methods - - def _setup_compile(self, outdir, macros, incdirs, sources, depends, - extra): - """Process arguments and decide which source files to compile.""" - if outdir is None: - outdir = self.output_dir - elif not isinstance(outdir, str): - raise TypeError("'output_dir' must be a string or None") - - if macros is None: - macros = self.macros - elif isinstance(macros, list): - macros = macros + (self.macros or []) - else: - raise TypeError("'macros' (if supplied) must be a list of tuples") - - if incdirs is None: - incdirs = self.include_dirs - elif isinstance(incdirs, (list, tuple)): - incdirs = list(incdirs) + (self.include_dirs or []) - else: - raise TypeError( - "'include_dirs' (if supplied) must be a list of strings") - - if extra is None: - extra = [] - - # Get the list of expected output (object) files - objects = self.object_filenames(sources, strip_dir=0, - output_dir=outdir) - assert len(objects) == len(sources) - - pp_opts = gen_preprocess_options(macros, incdirs) - - build = {} - for i in range(len(sources)): - src = sources[i] - obj = objects[i] - ext = os.path.splitext(src)[1] - self.mkpath(os.path.dirname(obj)) - build[obj] = (src, ext) - - return macros, objects, extra, pp_opts, build - - def _get_cc_args(self, pp_opts, debug, before): - # works for unixccompiler, cygwinccompiler - cc_args = pp_opts + ['-c'] - if debug: - cc_args[:0] = ['-g'] - if before: - cc_args[:0] = before - return cc_args - - def _fix_compile_args(self, output_dir, macros, include_dirs): - """Typecheck and fix-up some of the arguments to the 'compile()' - method, and return fixed-up values. Specifically: if 'output_dir' - is None, replaces it with 'self.output_dir'; ensures that 'macros' - is a list, and augments it with 'self.macros'; ensures that - 'include_dirs' is a list, and augments it with 'self.include_dirs'. - Guarantees that the returned values are of the correct type, - i.e. for 'output_dir' either string or None, and for 'macros' and - 'include_dirs' either list or None. - """ - if output_dir is None: - output_dir = self.output_dir - elif not isinstance(output_dir, str): - raise TypeError("'output_dir' must be a string or None") - - if macros is None: - macros = self.macros - elif isinstance(macros, list): - macros = macros + (self.macros or []) - else: - raise TypeError("'macros' (if supplied) must be a list of tuples") - - if include_dirs is None: - include_dirs = self.include_dirs - elif isinstance(include_dirs, (list, tuple)): - include_dirs = list(include_dirs) + (self.include_dirs or []) - else: - raise TypeError( - "'include_dirs' (if supplied) must be a list of strings") - - return output_dir, macros, include_dirs - - def _prep_compile(self, sources, output_dir, depends=None): - """Decide which source files must be recompiled. - - Determine the list of object files corresponding to 'sources', - and figure out which ones really need to be recompiled. - Return a list of all object files and a dictionary telling - which source files can be skipped. - """ - # Get the list of expected output (object) files - objects = self.object_filenames(sources, output_dir=output_dir) - assert len(objects) == len(sources) - - # Return an empty dict for the "which source files can be skipped" - # return value to preserve API compatibility. 
- return objects, {} - - def _fix_object_args(self, objects, output_dir): - """Typecheck and fix up some arguments supplied to various methods. - Specifically: ensure that 'objects' is a list; if output_dir is - None, replace with self.output_dir. Return fixed versions of - 'objects' and 'output_dir'. - """ - if not isinstance(objects, (list, tuple)): - raise TypeError("'objects' must be a list or tuple of strings") - objects = list(objects) - - if output_dir is None: - output_dir = self.output_dir - elif not isinstance(output_dir, str): - raise TypeError("'output_dir' must be a string or None") - - return (objects, output_dir) - - def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs): - """Typecheck and fix up some of the arguments supplied to the - 'link_*' methods. Specifically: ensure that all arguments are - lists, and augment them with their permanent versions - (eg. 'self.libraries' augments 'libraries'). Return a tuple with - fixed versions of all arguments. - """ - if libraries is None: - libraries = self.libraries - elif isinstance(libraries, (list, tuple)): - libraries = list (libraries) + (self.libraries or []) - else: - raise TypeError( - "'libraries' (if supplied) must be a list of strings") - - if library_dirs is None: - library_dirs = self.library_dirs - elif isinstance(library_dirs, (list, tuple)): - library_dirs = list (library_dirs) + (self.library_dirs or []) - else: - raise TypeError( - "'library_dirs' (if supplied) must be a list of strings") - - if runtime_library_dirs is None: - runtime_library_dirs = self.runtime_library_dirs - elif isinstance(runtime_library_dirs, (list, tuple)): - runtime_library_dirs = (list(runtime_library_dirs) + - (self.runtime_library_dirs or [])) - else: - raise TypeError("'runtime_library_dirs' (if supplied) " - "must be a list of strings") - - return (libraries, library_dirs, runtime_library_dirs) - - def _need_link(self, objects, output_file): - """Return true if we need to relink the files listed in 'objects' - to recreate 'output_file'. - """ - if self.force: - return True - else: - if self.dry_run: - newer = newer_group (objects, output_file, missing='newer') - else: - newer = newer_group (objects, output_file) - return newer - - def detect_language(self, sources): - """Detect the language of a given file, or list of files. Uses - language_map, and language_order to do the job. - """ - if not isinstance(sources, list): - sources = [sources] - lang = None - index = len(self.language_order) - for source in sources: - base, ext = os.path.splitext(source) - extlang = self.language_map.get(ext) - try: - extindex = self.language_order.index(extlang) - if extindex < index: - lang = extlang - index = extindex - except ValueError: - pass - return lang - - - # -- Worker methods ------------------------------------------------ - # (must be implemented by subclasses) - - def preprocess(self, source, output_file=None, macros=None, - include_dirs=None, extra_preargs=None, extra_postargs=None): - """Preprocess a single C/C++ source file, named in 'source'. - Output will be written to file named 'output_file', or stdout if - 'output_file' not supplied. 'macros' is a list of macro - definitions as for 'compile()', which will augment the macros set - with 'define_macro()' and 'undefine_macro()'. 'include_dirs' is a - list of directory names that will be added to the default list. - - Raises PreprocessError on failure. 
- """ - pass - - def compile(self, sources, output_dir=None, macros=None, - include_dirs=None, debug=0, extra_preargs=None, - extra_postargs=None, depends=None): - """Compile one or more source files. - - 'sources' must be a list of filenames, most likely C/C++ - files, but in reality anything that can be handled by a - particular compiler and compiler class (eg. MSVCCompiler can - handle resource files in 'sources'). Return a list of object - filenames, one per source filename in 'sources'. Depending on - the implementation, not all source files will necessarily be - compiled, but all corresponding object filenames will be - returned. - - If 'output_dir' is given, object files will be put under it, while - retaining their original path component. That is, "foo/bar.c" - normally compiles to "foo/bar.o" (for a Unix implementation); if - 'output_dir' is "build", then it would compile to - "build/foo/bar.o". - - 'macros', if given, must be a list of macro definitions. A macro - definition is either a (name, value) 2-tuple or a (name,) 1-tuple. - The former defines a macro; if the value is None, the macro is - defined without an explicit value. The 1-tuple case undefines a - macro. Later definitions/redefinitions/ undefinitions take - precedence. - - 'include_dirs', if given, must be a list of strings, the - directories to add to the default include file search path for this - compilation only. - - 'debug' is a boolean; if true, the compiler will be instructed to - output debug symbols in (or alongside) the object file(s). - - 'extra_preargs' and 'extra_postargs' are implementation- dependent. - On platforms that have the notion of a command-line (e.g. Unix, - DOS/Windows), they are most likely lists of strings: extra - command-line arguments to prepend/append to the compiler command - line. On other platforms, consult the implementation class - documentation. In any event, they are intended as an escape hatch - for those occasions when the abstract compiler framework doesn't - cut the mustard. - - 'depends', if given, is a list of filenames that all targets - depend on. If a source file is older than any file in - depends, then the source file will be recompiled. This - supports dependency tracking, but only at a coarse - granularity. - - Raises CompileError on failure. - """ - # A concrete compiler class can either override this method - # entirely or implement _compile(). - macros, objects, extra_postargs, pp_opts, build = \ - self._setup_compile(output_dir, macros, include_dirs, sources, - depends, extra_postargs) - cc_args = self._get_cc_args(pp_opts, debug, extra_preargs) - - for obj in objects: - try: - src, ext = build[obj] - except KeyError: - continue - self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts) - - # Return *all* object filenames, not just the ones we just built. - return objects - - def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): - """Compile 'src' to product 'obj'.""" - # A concrete compiler class that does not override compile() - # should implement _compile(). - pass - - def create_static_lib(self, objects, output_libname, output_dir=None, - debug=0, target_lang=None): - """Link a bunch of stuff together to create a static library file. - The "bunch of stuff" consists of the list of object files supplied - as 'objects', the extra object files supplied to - 'add_link_object()' and/or 'set_link_objects()', the libraries - supplied to 'add_library()' and/or 'set_libraries()', and the - libraries supplied as 'libraries' (if any). 
- - 'output_libname' should be a library name, not a filename; the - filename will be inferred from the library name. 'output_dir' is - the directory where the library file will be put. - - 'debug' is a boolean; if true, debugging information will be - included in the library (note that on most platforms, it is the - compile step where this matters: the 'debug' flag is included here - just for consistency). - - 'target_lang' is the target language for which the given objects - are being compiled. This allows specific linkage time treatment of - certain languages. - - Raises LibError on failure. - """ - pass - - - # values for target_desc parameter in link() - SHARED_OBJECT = "shared_object" - SHARED_LIBRARY = "shared_library" - EXECUTABLE = "executable" - - def link(self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - """Link a bunch of stuff together to create an executable or - shared library file. - - The "bunch of stuff" consists of the list of object files supplied - as 'objects'. 'output_filename' should be a filename. If - 'output_dir' is supplied, 'output_filename' is relative to it - (i.e. 'output_filename' can provide directory components if - needed). - - 'libraries' is a list of libraries to link against. These are - library names, not filenames, since they're translated into - filenames in a platform-specific way (eg. "foo" becomes "libfoo.a" - on Unix and "foo.lib" on DOS/Windows). However, they can include a - directory component, which means the linker will look in that - specific directory rather than searching all the normal locations. - - 'library_dirs', if supplied, should be a list of directories to - search for libraries that were specified as bare library names - (ie. no directory component). These are on top of the system - default and those supplied to 'add_library_dir()' and/or - 'set_library_dirs()'. 'runtime_library_dirs' is a list of - directories that will be embedded into the shared library and used - to search for other shared libraries that *it* depends on at - run-time. (This may only be relevant on Unix.) - - 'export_symbols' is a list of symbols that the shared library will - export. (This appears to be relevant only on Windows.) - - 'debug' is as for 'compile()' and 'create_static_lib()', with the - slight distinction that it actually matters on most platforms (as - opposed to 'create_static_lib()', which includes a 'debug' flag - mostly for form's sake). - - 'extra_preargs' and 'extra_postargs' are as for 'compile()' (except - of course that they supply command-line arguments for the - particular linker being used). - - 'target_lang' is the target language for which the given objects - are being compiled. This allows specific linkage time treatment of - certain languages. - - Raises LinkError on failure. - """ - raise NotImplementedError - - - # Old 'link_*()' methods, rewritten to use the new 'link()' method. 
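The legacy link_*() wrappers that follow all funnel into the abstract link() method defined above. As a point of reference, a minimal sketch of how downstream build code typically drove this abstraction, using only methods defined in this class; file and library names are hypothetical, and it only runs on interpreters that still ship distutils (before this removal):

from distutils.ccompiler import new_compiler

# Pick the default compiler class for this platform ('unix', 'msvc', ...).
cc = new_compiler()
# Per-instance settings apply to every compile/link step driven by 'cc'.
cc.define_macro("NDEBUG")
cc.add_include_dir("include")
# compile() returns one object filename per source file.
objects = cc.compile(["src/spam.c"], output_dir="build")
# link_shared_object() delegates to link(CCompiler.SHARED_OBJECT, ...).
cc.link_shared_object(objects, "spam.so", output_dir="build",
                      libraries=["m"], library_dirs=["/usr/local/lib"])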
- - def link_shared_lib(self, - objects, - output_libname, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - self.link(CCompiler.SHARED_LIBRARY, objects, - self.library_filename(output_libname, lib_type='shared'), - output_dir, - libraries, library_dirs, runtime_library_dirs, - export_symbols, debug, - extra_preargs, extra_postargs, build_temp, target_lang) - - - def link_shared_object(self, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - self.link(CCompiler.SHARED_OBJECT, objects, - output_filename, output_dir, - libraries, library_dirs, runtime_library_dirs, - export_symbols, debug, - extra_preargs, extra_postargs, build_temp, target_lang) - - - def link_executable(self, - objects, - output_progname, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - target_lang=None): - self.link(CCompiler.EXECUTABLE, objects, - self.executable_filename(output_progname), output_dir, - libraries, library_dirs, runtime_library_dirs, None, - debug, extra_preargs, extra_postargs, None, target_lang) - - - # -- Miscellaneous methods ----------------------------------------- - # These are all used by the 'gen_lib_options() function; there is - # no appropriate default implementation so subclasses should - # implement all of these. - - def library_dir_option(self, dir): - """Return the compiler option to add 'dir' to the list of - directories searched for libraries. - """ - raise NotImplementedError - - def runtime_library_dir_option(self, dir): - """Return the compiler option to add 'dir' to the list of - directories searched for runtime libraries. - """ - raise NotImplementedError - - def library_option(self, lib): - """Return the compiler option to add 'lib' to the list of libraries - linked into the shared library or executable. - """ - raise NotImplementedError - - def has_function(self, funcname, includes=None, include_dirs=None, - libraries=None, library_dirs=None): - """Return a boolean indicating whether funcname is supported on - the current platform. The optional arguments can be used to - augment the compilation environment. - """ - # this can't be included at module scope because it tries to - # import math which might not be available at that point - maybe - # the necessary logic should just be inlined? 
- import tempfile - if includes is None: - includes = [] - if include_dirs is None: - include_dirs = [] - if libraries is None: - libraries = [] - if library_dirs is None: - library_dirs = [] - fd, fname = tempfile.mkstemp(".c", funcname, text=True) - f = os.fdopen(fd, "w") - try: - for incl in includes: - f.write("""#include "%s"\n""" % incl) - f.write("""\ -int main (int argc, char **argv) { - %s(); - return 0; -} -""" % funcname) - finally: - f.close() - try: - objects = self.compile([fname], include_dirs=include_dirs) - except CompileError: - return False - - try: - self.link_executable(objects, "a.out", - libraries=libraries, - library_dirs=library_dirs) - except (LinkError, TypeError): - return False - return True - - def find_library_file (self, dirs, lib, debug=0): - """Search the specified list of directories for a static or shared - library file 'lib' and return the full path to that file. If - 'debug' true, look for a debugging version (if that makes sense on - the current platform). Return None if 'lib' wasn't found in any of - the specified directories. - """ - raise NotImplementedError - - # -- Filename generation methods ----------------------------------- - - # The default implementation of the filename generating methods are - # prejudiced towards the Unix/DOS/Windows view of the world: - # * object files are named by replacing the source file extension - # (eg. .c/.cpp -> .o/.obj) - # * library files (shared or static) are named by plugging the - # library name and extension into a format string, eg. - # "lib%s.%s" % (lib_name, ".a") for Unix static libraries - # * executables are named by appending an extension (possibly - # empty) to the program name: eg. progname + ".exe" for - # Windows - # - # To reduce redundant code, these methods expect to find - # several attributes in the current object (presumably defined - # as class attributes): - # * src_extensions - - # list of C/C++ source file extensions, eg. ['.c', '.cpp'] - # * obj_extension - - # object file extension, eg. '.o' or '.obj' - # * static_lib_extension - - # extension for static library files, eg. '.a' or '.lib' - # * shared_lib_extension - - # extension for shared library/object files, eg. '.so', '.dll' - # * static_lib_format - - # format string for generating static library filenames, - # eg. 'lib%s.%s' or '%s.%s' - # * shared_lib_format - # format string for generating shared library filenames - # (probably same as static_lib_format, since the extension - # is one of the intended parameters to the format string) - # * exe_extension - - # extension for executable files, eg. 
'' or '.exe' - - def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): - if output_dir is None: - output_dir = '' - obj_names = [] - for src_name in source_filenames: - base, ext = os.path.splitext(src_name) - base = os.path.splitdrive(base)[1] # Chop off the drive - base = base[os.path.isabs(base):] # If abs, chop off leading / - if ext not in self.src_extensions: - raise UnknownFileError( - "unknown file type '%s' (from '%s')" % (ext, src_name)) - if strip_dir: - base = os.path.basename(base) - obj_names.append(os.path.join(output_dir, - base + self.obj_extension)) - return obj_names - - def shared_object_filename(self, basename, strip_dir=0, output_dir=''): - assert output_dir is not None - if strip_dir: - basename = os.path.basename(basename) - return os.path.join(output_dir, basename + self.shared_lib_extension) - - def executable_filename(self, basename, strip_dir=0, output_dir=''): - assert output_dir is not None - if strip_dir: - basename = os.path.basename(basename) - return os.path.join(output_dir, basename + (self.exe_extension or '')) - - def library_filename(self, libname, lib_type='static', # or 'shared' - strip_dir=0, output_dir=''): - assert output_dir is not None - if lib_type not in ("static", "shared", "dylib", "xcode_stub"): - raise ValueError( - "'lib_type' must be \"static\", \"shared\", \"dylib\", or \"xcode_stub\"") - fmt = getattr(self, lib_type + "_lib_format") - ext = getattr(self, lib_type + "_lib_extension") - - dir, base = os.path.split(libname) - filename = fmt % (base, ext) - if strip_dir: - dir = '' - - return os.path.join(output_dir, dir, filename) - - - # -- Utility methods ----------------------------------------------- - - def announce(self, msg, level=1): - log.debug(msg) - - def debug_print(self, msg): - from distutils.debug import DEBUG - if DEBUG: - print(msg) - - def warn(self, msg): - sys.stderr.write("warning: %s\n" % msg) - - def execute(self, func, args, msg=None, level=1): - execute(func, args, msg, self.dry_run) - - def spawn(self, cmd): - spawn(cmd, dry_run=self.dry_run) - - def move_file(self, src, dst): - return move_file(src, dst, dry_run=self.dry_run) - - def mkpath (self, name, mode=0o777): - mkpath(name, mode, dry_run=self.dry_run) - - -# Map a sys.platform/os.name ('posix', 'nt') to the default compiler -# type for that platform. Keys are interpreted as re match -# patterns. Order is important; platform mappings are preferred over -# OS names. -_default_compilers = ( - - # Platform string mappings - - # on a cygwin built python we can use gcc like an ordinary UNIXish - # compiler - ('cygwin.*', 'unix'), - - # OS name mappings - ('posix', 'unix'), - ('nt', 'msvc'), - - ) - -def get_default_compiler(osname=None, platform=None): - """Determine the default compiler to use for the given platform. - - osname should be one of the standard Python OS names (i.e. the - ones returned by os.name) and platform the common value - returned by sys.platform for the platform in question. - - The default values are os.name and sys.platform in case the - parameters are not given. - """ - if osname is None: - osname = os.name - if platform is None: - platform = sys.platform - for pattern, compiler in _default_compilers: - if re.match(pattern, platform) is not None or \ - re.match(pattern, osname) is not None: - return compiler - # Default to Unix compiler - return 'unix' - -# Map compiler types to (module_name, class_name) pairs -- ie. where to -# find the code that implements an interface to this compiler. 
(The module -# is assumed to be in the 'distutils' package.) -compiler_class = { 'unix': ('unixccompiler', 'UnixCCompiler', - "standard UNIX-style compiler"), - 'msvc': ('_msvccompiler', 'MSVCCompiler', - "Microsoft Visual C++"), - 'cygwin': ('cygwinccompiler', 'CygwinCCompiler', - "Cygwin port of GNU C Compiler for Win32"), - 'mingw32': ('cygwinccompiler', 'Mingw32CCompiler', - "Mingw32 port of GNU C Compiler for Win32"), - 'bcpp': ('bcppcompiler', 'BCPPCompiler', - "Borland C++ Compiler"), - } - -def show_compilers(): - """Print list of available compilers (used by the "--help-compiler" - options to "build", "build_ext", "build_clib"). - """ - # XXX this "knows" that the compiler option it's describing is - # "--compiler", which just happens to be the case for the three - # commands that use it. - from distutils.fancy_getopt import FancyGetopt - compilers = [] - for compiler in compiler_class.keys(): - compilers.append(("compiler="+compiler, None, - compiler_class[compiler][2])) - compilers.sort() - pretty_printer = FancyGetopt(compilers) - pretty_printer.print_help("List of available compilers:") - - -def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0): - """Generate an instance of some CCompiler subclass for the supplied - platform/compiler combination. 'plat' defaults to 'os.name' - (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler - for that platform. Currently only 'posix' and 'nt' are supported, and - the default compilers are "traditional Unix interface" (UnixCCompiler - class) and Visual C++ (MSVCCompiler class). Note that it's perfectly - possible to ask for a Unix compiler object under Windows, and a - Microsoft compiler object under Unix -- if you supply a value for - 'compiler', 'plat' is ignored. - """ - if plat is None: - plat = os.name - - try: - if compiler is None: - compiler = get_default_compiler(plat) - - (module_name, class_name, long_description) = compiler_class[compiler] - except KeyError: - msg = "don't know how to compile C/C++ code on platform '%s'" % plat - if compiler is not None: - msg = msg + " with '%s' compiler" % compiler - raise DistutilsPlatformError(msg) - - try: - module_name = "distutils." + module_name - __import__ (module_name) - module = sys.modules[module_name] - klass = vars(module)[class_name] - except ImportError: - raise DistutilsModuleError( - "can't compile C/C++ code: unable to load module '%s'" % \ - module_name) - except KeyError: - raise DistutilsModuleError( - "can't compile C/C++ code: unable to find class '%s' " - "in module '%s'" % (class_name, module_name)) - - # XXX The None is necessary to preserve backwards compatibility - # with classes that expect verbose to be the first positional - # argument. - return klass(None, dry_run, force) - - -def gen_preprocess_options(macros, include_dirs): - """Generate C pre-processor options (-D, -U, -I) as used by at least - two types of compilers: the typical Unix compiler and Visual C++. - 'macros' is the usual thing, a list of 1- or 2-tuples, where (name,) - means undefine (-U) macro 'name', and (name,value) means define (-D) - macro 'name' to 'value'. 'include_dirs' is just a list of directory - names to be added to the header file search path (-I). Returns a list - of command-line options suitable for either Unix compilers or Visual - C++. - """ - # XXX it would be nice (mainly aesthetic, and so we don't generate - # stupid-looking command lines) to go over 'macros' and eliminate - # redundant definitions/undefinitions (ie. 
ensure that only the - # latest mention of a particular macro winds up on the command - # line). I don't think it's essential, though, since most (all?) - # Unix C compilers only pay attention to the latest -D or -U - # mention of a macro on their command line. Similar situation for - # 'include_dirs'. I'm punting on both for now. Anyways, weeding out - # redundancies like this should probably be the province of - # CCompiler, since the data structures used are inherited from it - # and therefore common to all CCompiler classes. - pp_opts = [] - for macro in macros: - if not (isinstance(macro, tuple) and 1 <= len(macro) <= 2): - raise TypeError( - "bad macro definition '%s': " - "each element of 'macros' list must be a 1- or 2-tuple" - % macro) - - if len(macro) == 1: # undefine this macro - pp_opts.append("-U%s" % macro[0]) - elif len(macro) == 2: - if macro[1] is None: # define with no explicit value - pp_opts.append("-D%s" % macro[0]) - else: - # XXX *don't* need to be clever about quoting the - # macro value here, because we're going to avoid the - # shell at all costs when we spawn the command! - pp_opts.append("-D%s=%s" % macro) - - for dir in include_dirs: - pp_opts.append("-I%s" % dir) - return pp_opts - - -def gen_lib_options (compiler, library_dirs, runtime_library_dirs, libraries): - """Generate linker options for searching library directories and - linking with specific libraries. 'libraries' and 'library_dirs' are, - respectively, lists of library names (not filenames!) and search - directories. Returns a list of command-line options suitable for use - with some compiler (depending on the two format strings passed in). - """ - lib_opts = [] - - for dir in library_dirs: - lib_opts.append(compiler.library_dir_option(dir)) - - for dir in runtime_library_dirs: - opt = compiler.runtime_library_dir_option(dir) - if isinstance(opt, list): - lib_opts = lib_opts + opt - else: - lib_opts.append(opt) - - # XXX it's important that we *not* remove redundant library mentions! - # sometimes you really do have to say "-lfoo -lbar -lfoo" in order to - # resolve all symbols. I just hope we never have to say "-lfoo obj.o - # -lbar" to get things to work -- that's certainly a possibility, but a - # pretty nasty way to arrange your C code. - - for lib in libraries: - (lib_dir, lib_name) = os.path.split(lib) - if lib_dir: - lib_file = compiler.find_library_file([lib_dir], lib_name) - if lib_file: - lib_opts.append(lib_file) - else: - compiler.warn("no library file corresponding to " - "'%s' found (skipping)" % lib) - else: - lib_opts.append(compiler.library_option (lib)) - return lib_opts diff --git a/Lib/distutils/cmd.py b/Lib/distutils/cmd.py deleted file mode 100644 index dba3191e58474c..00000000000000 --- a/Lib/distutils/cmd.py +++ /dev/null @@ -1,403 +0,0 @@ -"""distutils.cmd - -Provides the Command class, the base class for the command classes -in the distutils.command package. -""" - -import sys, os, re -from distutils.errors import DistutilsOptionError -from distutils import util, dir_util, file_util, archive_util, dep_util -from distutils import log - -class Command: - """Abstract base class for defining command classes, the "worker bees" - of the Distutils. A useful analogy for command classes is to think of - them as subroutines with local variables called "options". The options - are "declared" in 'initialize_options()' and "defined" (given their - final values, aka "finalized") in 'finalize_options()', both of which - must be defined by every command class. 
The distinction between the - two is necessary because option values might come from the outside - world (command line, config file, ...), and any options dependent on - other options must be computed *after* these outside influences have - been processed -- hence 'finalize_options()'. The "body" of the - subroutine, where it does all its work based on the values of its - options, is the 'run()' method, which must also be implemented by every - command class. - """ - - # 'sub_commands' formalizes the notion of a "family" of commands, - # eg. "install" as the parent with sub-commands "install_lib", - # "install_headers", etc. The parent of a family of commands - # defines 'sub_commands' as a class attribute; it's a list of - # (command_name : string, predicate : unbound_method | string | None) - # tuples, where 'predicate' is a method of the parent command that - # determines whether the corresponding command is applicable in the - # current situation. (Eg. we "install_headers" is only applicable if - # we have any C header files to install.) If 'predicate' is None, - # that command is always applicable. - # - # 'sub_commands' is usually defined at the *end* of a class, because - # predicates can be unbound methods, so they must already have been - # defined. The canonical example is the "install" command. - sub_commands = [] - - - # -- Creation/initialization methods ------------------------------- - - def __init__(self, dist): - """Create and initialize a new Command object. Most importantly, - invokes the 'initialize_options()' method, which is the real - initializer and depends on the actual command being - instantiated. - """ - # late import because of mutual dependence between these classes - from distutils.dist import Distribution - - if not isinstance(dist, Distribution): - raise TypeError("dist must be a Distribution instance") - if self.__class__ is Command: - raise RuntimeError("Command is an abstract class") - - self.distribution = dist - self.initialize_options() - - # Per-command versions of the global flags, so that the user can - # customize Distutils' behaviour command-by-command and let some - # commands fall back on the Distribution's behaviour. None means - # "not defined, check self.distribution's copy", while 0 or 1 mean - # false and true (duh). Note that this means figuring out the real - # value of each flag is a touch complicated -- hence "self._dry_run" - # will be handled by __getattr__, below. - # XXX This needs to be fixed. - self._dry_run = None - - # verbose is largely ignored, but needs to be set for - # backwards compatibility (I think)? - self.verbose = dist.verbose - - # Some commands define a 'self.force' option to ignore file - # timestamps, but methods defined *here* assume that - # 'self.force' exists for all commands. So define it here - # just to be safe. - self.force = None - - # The 'help' flag is just used for command-line parsing, so - # none of that complicated bureaucracy is needed. - self.help = 0 - - # 'finalized' records whether or not 'finalize_options()' has been - # called. 'finalize_options()' itself should not pay attention to - # this flag: it is the business of 'ensure_finalized()', which - # always calls 'finalize_options()', to respect/update it. - self.finalized = 0 - - # XXX A more explicit way to customize dry_run would be better. 
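The option lifecycle spelled out in the class docstring above -- defaults in initialize_options(), resolution in finalize_options(), work in run() -- is easiest to see in a small subclass. A minimal, hypothetical sketch (command and option names are illustrative, and it presumes an interpreter that still ships distutils):

from distutils.cmd import Command

class clean_docs(Command):
    # (long option, short option, help text) triples, as consumed by
    # dump_options() and the option parser.
    user_options = [("build-dir=", None, "directory holding built docs")]

    def initialize_options(self):
        # Defaults only; no dependencies between options belong here.
        self.build_dir = None

    def finalize_options(self):
        # Called after command line and config files have been processed.
        if self.build_dir is None:
            self.build_dir = "build/docs"

    def run(self):
        # All real work and terminal output belongs here.
        self.announce("cleaning %s" % self.build_dir)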
- def __getattr__(self, attr): - if attr == 'dry_run': - myval = getattr(self, "_" + attr) - if myval is None: - return getattr(self.distribution, attr) - else: - return myval - else: - raise AttributeError(attr) - - def ensure_finalized(self): - if not self.finalized: - self.finalize_options() - self.finalized = 1 - - # Subclasses must define: - # initialize_options() - # provide default values for all options; may be customized by - # setup script, by options from config file(s), or by command-line - # options - # finalize_options() - # decide on the final values for all options; this is called - # after all possible intervention from the outside world - # (command-line, option file, etc.) has been processed - # run() - # run the command: do whatever it is we're here to do, - # controlled by the command's various option values - - def initialize_options(self): - """Set default values for all the options that this command - supports. Note that these defaults may be overridden by other - commands, by the setup script, by config files, or by the - command-line. Thus, this is not the place to code dependencies - between options; generally, 'initialize_options()' implementations - are just a bunch of "self.foo = None" assignments. - - This method must be implemented by all command classes. - """ - raise RuntimeError("abstract method -- subclass %s must override" - % self.__class__) - - def finalize_options(self): - """Set final values for all the options that this command supports. - This is always called as late as possible, ie. after any option - assignments from the command-line or from other commands have been - done. Thus, this is the place to code option dependencies: if - 'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as - long as 'foo' still has the same value it was assigned in - 'initialize_options()'. - - This method must be implemented by all command classes. - """ - raise RuntimeError("abstract method -- subclass %s must override" - % self.__class__) - - - def dump_options(self, header=None, indent=""): - from distutils.fancy_getopt import longopt_xlate - if header is None: - header = "command options for '%s':" % self.get_command_name() - self.announce(indent + header, level=log.INFO) - indent = indent + " " - for (option, _, _) in self.user_options: - option = option.translate(longopt_xlate) - if option[-1] == "=": - option = option[:-1] - value = getattr(self, option) - self.announce(indent + "%s = %s" % (option, value), - level=log.INFO) - - def run(self): - """A command's raison d'etre: carry out the action it exists to - perform, controlled by the options initialized in - 'initialize_options()', customized by other commands, the setup - script, the command-line, and config files, and finalized in - 'finalize_options()'. All terminal output and filesystem - interaction should be done by 'run()'. - - This method must be implemented by all command classes. - """ - raise RuntimeError("abstract method -- subclass %s must override" - % self.__class__) - - def announce(self, msg, level=1): - """If the current verbosity level is of greater than or equal to - 'level' print 'msg' to stdout. - """ - log.log(level, msg) - - def debug_print(self, msg): - """Print 'msg' to stdout if the global DEBUG (taken from the - DISTUTILS_DEBUG environment variable) flag is true. 
- """ - from distutils.debug import DEBUG - if DEBUG: - print(msg) - sys.stdout.flush() - - - # -- Option validation methods ------------------------------------- - # (these are very handy in writing the 'finalize_options()' method) - # - # NB. the general philosophy here is to ensure that a particular option - # value meets certain type and value constraints. If not, we try to - # force it into conformance (eg. if we expect a list but have a string, - # split the string on comma and/or whitespace). If we can't force the - # option into conformance, raise DistutilsOptionError. Thus, command - # classes need do nothing more than (eg.) - # self.ensure_string_list('foo') - # and they can be guaranteed that thereafter, self.foo will be - # a list of strings. - - def _ensure_stringlike(self, option, what, default=None): - val = getattr(self, option) - if val is None: - setattr(self, option, default) - return default - elif not isinstance(val, str): - raise DistutilsOptionError("'%s' must be a %s (got `%s`)" - % (option, what, val)) - return val - - def ensure_string(self, option, default=None): - """Ensure that 'option' is a string; if not defined, set it to - 'default'. - """ - self._ensure_stringlike(option, "string", default) - - def ensure_string_list(self, option): - r"""Ensure that 'option' is a list of strings. If 'option' is - currently a string, we split it either on /,\s*/ or /\s+/, so - "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become - ["foo", "bar", "baz"]. - """ - val = getattr(self, option) - if val is None: - return - elif isinstance(val, str): - setattr(self, option, re.split(r',\s*|\s+', val)) - else: - if isinstance(val, list): - ok = all(isinstance(v, str) for v in val) - else: - ok = False - if not ok: - raise DistutilsOptionError( - "'%s' must be a list of strings (got %r)" - % (option, val)) - - def _ensure_tested_string(self, option, tester, what, error_fmt, - default=None): - val = self._ensure_stringlike(option, what, default) - if val is not None and not tester(val): - raise DistutilsOptionError(("error in '%s' option: " + error_fmt) - % (option, val)) - - def ensure_filename(self, option): - """Ensure that 'option' is the name of an existing file.""" - self._ensure_tested_string(option, os.path.isfile, - "filename", - "'%s' does not exist or is not a file") - - def ensure_dirname(self, option): - self._ensure_tested_string(option, os.path.isdir, - "directory name", - "'%s' does not exist or is not a directory") - - - # -- Convenience methods for commands ------------------------------ - - def get_command_name(self): - if hasattr(self, 'command_name'): - return self.command_name - else: - return self.__class__.__name__ - - def set_undefined_options(self, src_cmd, *option_pairs): - """Set the values of any "undefined" options from corresponding - option values in some other command object. "Undefined" here means - "is None", which is the convention used to indicate that an option - has not been changed between 'initialize_options()' and - 'finalize_options()'. Usually called from 'finalize_options()' for - options that depend on some other command rather than another - option of the same command. 'src_cmd' is the other command from - which option values will be taken (a command object will be created - for it if necessary); the remaining arguments are - '(src_option,dst_option)' tuples which mean "take the value of - 'src_option' in the 'src_cmd' command object, and copy it to - 'dst_option' in the current command object". 
- """ - # Option_pairs: list of (src_option, dst_option) tuples - src_cmd_obj = self.distribution.get_command_obj(src_cmd) - src_cmd_obj.ensure_finalized() - for (src_option, dst_option) in option_pairs: - if getattr(self, dst_option) is None: - setattr(self, dst_option, getattr(src_cmd_obj, src_option)) - - def get_finalized_command(self, command, create=1): - """Wrapper around Distribution's 'get_command_obj()' method: find - (create if necessary and 'create' is true) the command object for - 'command', call its 'ensure_finalized()' method, and return the - finalized command object. - """ - cmd_obj = self.distribution.get_command_obj(command, create) - cmd_obj.ensure_finalized() - return cmd_obj - - # XXX rename to 'get_reinitialized_command()'? (should do the - # same in dist.py, if so) - def reinitialize_command(self, command, reinit_subcommands=0): - return self.distribution.reinitialize_command(command, - reinit_subcommands) - - def run_command(self, command): - """Run some other command: uses the 'run_command()' method of - Distribution, which creates and finalizes the command object if - necessary and then invokes its 'run()' method. - """ - self.distribution.run_command(command) - - def get_sub_commands(self): - """Determine the sub-commands that are relevant in the current - distribution (ie., that need to be run). This is based on the - 'sub_commands' class attribute: each tuple in that list may include - a method that we call to determine if the subcommand needs to be - run for the current distribution. Return a list of command names. - """ - commands = [] - for (cmd_name, method) in self.sub_commands: - if method is None or method(self): - commands.append(cmd_name) - return commands - - - # -- External world manipulation ----------------------------------- - - def warn(self, msg): - log.warn("warning: %s: %s\n", self.get_command_name(), msg) - - def execute(self, func, args, msg=None, level=1): - util.execute(func, args, msg, dry_run=self.dry_run) - - def mkpath(self, name, mode=0o777): - dir_util.mkpath(name, mode, dry_run=self.dry_run) - - def copy_file(self, infile, outfile, preserve_mode=1, preserve_times=1, - link=None, level=1): - """Copy a file respecting verbose, dry-run and force flags. (The - former two default to whatever is in the Distribution object, and - the latter defaults to false for commands that don't define it.)""" - return file_util.copy_file(infile, outfile, preserve_mode, - preserve_times, not self.force, link, - dry_run=self.dry_run) - - def copy_tree(self, infile, outfile, preserve_mode=1, preserve_times=1, - preserve_symlinks=0, level=1): - """Copy an entire directory tree respecting verbose, dry-run, - and force flags. 
- """ - return dir_util.copy_tree(infile, outfile, preserve_mode, - preserve_times, preserve_symlinks, - not self.force, dry_run=self.dry_run) - - def move_file (self, src, dst, level=1): - """Move a file respecting dry-run flag.""" - return file_util.move_file(src, dst, dry_run=self.dry_run) - - def spawn(self, cmd, search_path=1, level=1): - """Spawn an external command respecting dry-run flag.""" - from distutils.spawn import spawn - spawn(cmd, search_path, dry_run=self.dry_run) - - def make_archive(self, base_name, format, root_dir=None, base_dir=None, - owner=None, group=None): - return archive_util.make_archive(base_name, format, root_dir, base_dir, - dry_run=self.dry_run, - owner=owner, group=group) - - def make_file(self, infiles, outfile, func, args, - exec_msg=None, skip_msg=None, level=1): - """Special case of 'execute()' for operations that process one or - more input files and generate one output file. Works just like - 'execute()', except the operation is skipped and a different - message printed if 'outfile' already exists and is newer than all - files listed in 'infiles'. If the command defined 'self.force', - and it is true, then the command is unconditionally run -- does no - timestamp checks. - """ - if skip_msg is None: - skip_msg = "skipping %s (inputs unchanged)" % outfile - - # Allow 'infiles' to be a single string - if isinstance(infiles, str): - infiles = (infiles,) - elif not isinstance(infiles, (list, tuple)): - raise TypeError( - "'infiles' must be a string, or a list or tuple of strings") - - if exec_msg is None: - exec_msg = "generating %s from %s" % (outfile, ', '.join(infiles)) - - # If 'outfile' must be regenerated (either because it doesn't - # exist, is out-of-date, or the 'force' flag is true) then - # perform the action that presumably regenerates it - if self.force or dep_util.newer_group(infiles, outfile): - self.execute(func, args, exec_msg, level) - # Otherwise, print the "skip" message - else: - log.debug(skip_msg) diff --git a/Lib/distutils/command/__init__.py b/Lib/distutils/command/__init__.py deleted file mode 100644 index fd0bfae7ade6ff..00000000000000 --- a/Lib/distutils/command/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -"""distutils.command - -Package containing implementation of all the standard Distutils -commands.""" - -__all__ = ['build', - 'build_py', - 'build_ext', - 'build_clib', - 'build_scripts', - 'clean', - 'install', - 'install_lib', - 'install_headers', - 'install_scripts', - 'install_data', - 'sdist', - 'register', - 'bdist', - 'bdist_dumb', - 'bdist_rpm', - 'check', - 'upload', - # These two are reserved for future use: - #'bdist_sdux', - #'bdist_pkgtool', - # Note: - # bdist_packager is not included because it only provides - # an abstract base class - ] diff --git a/Lib/distutils/command/bdist.py b/Lib/distutils/command/bdist.py deleted file mode 100644 index 60309e1ff2fce3..00000000000000 --- a/Lib/distutils/command/bdist.py +++ /dev/null @@ -1,138 +0,0 @@ -"""distutils.command.bdist - -Implements the Distutils 'bdist' command (create a built [binary] -distribution).""" - -import os -from distutils.core import Command -from distutils.errors import * -from distutils.util import get_platform - - -def show_formats(): - """Print list of available formats (arguments to "--format" option). 
- """ - from distutils.fancy_getopt import FancyGetopt - formats = [] - for format in bdist.format_commands: - formats.append(("formats=" + format, None, - bdist.format_command[format][1])) - pretty_printer = FancyGetopt(formats) - pretty_printer.print_help("List of available distribution formats:") - - -class bdist(Command): - - description = "create a built (binary) distribution" - - user_options = [('bdist-base=', 'b', - "temporary directory for creating built distributions"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_platform()), - ('formats=', None, - "formats for distribution (comma-separated list)"), - ('dist-dir=', 'd', - "directory to put final built distributions in " - "[default: dist]"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('owner=', 'u', - "Owner name used when creating a tar file" - " [default: current user]"), - ('group=', 'g', - "Group name used when creating a tar file" - " [default: current group]"), - ] - - boolean_options = ['skip-build'] - - help_options = [ - ('help-formats', None, - "lists available distribution formats", show_formats), - ] - - # The following commands do not take a format option from bdist - no_format_option = ('bdist_rpm',) - - # This won't do in reality: will need to distinguish RPM-ish Linux, - # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS. - default_format = {'posix': 'gztar', - 'nt': 'zip'} - - # Establish the preferred order (for the --help-formats option). - format_commands = ['rpm', 'gztar', 'bztar', 'xztar', 'ztar', 'tar', 'zip'] - - # And the real information. - format_command = {'rpm': ('bdist_rpm', "RPM distribution"), - 'gztar': ('bdist_dumb', "gzip'ed tar file"), - 'bztar': ('bdist_dumb', "bzip2'ed tar file"), - 'xztar': ('bdist_dumb', "xz'ed tar file"), - 'ztar': ('bdist_dumb', "compressed tar file"), - 'tar': ('bdist_dumb', "tar file"), - 'zip': ('bdist_dumb', "ZIP file"), - } - - def initialize_options(self): - self.bdist_base = None - self.plat_name = None - self.formats = None - self.dist_dir = None - self.skip_build = 0 - self.group = None - self.owner = None - - def finalize_options(self): - # have to finalize 'plat_name' before 'bdist_base' - if self.plat_name is None: - if self.skip_build: - self.plat_name = get_platform() - else: - self.plat_name = self.get_finalized_command('build').plat_name - - # 'bdist_base' -- parent of per-built-distribution-format - # temporary directories (eg. we'll probably have - # "build/bdist.<plat>/dumb", "build/bdist.<plat>/rpm", etc.) - if self.bdist_base is None: - build_base = self.get_finalized_command('build').build_base - self.bdist_base = os.path.join(build_base, - 'bdist.' + self.plat_name) - - self.ensure_string_list('formats') - if self.formats is None: - try: - self.formats = [self.default_format[os.name]] - except KeyError: - raise DistutilsPlatformError( - "don't know how to create built distributions " - "on platform %s" % os.name) - - if self.dist_dir is None: - self.dist_dir = "dist" - - def run(self): - # Figure out which sub-commands we need to run. - commands = [] - for format in self.formats: - try: - commands.append(self.format_command[format][0]) - except KeyError: - raise DistutilsOptionError("invalid format '%s'" % format) - - # Reinitialize and run each command. 
- for i in range(len(self.formats)): - cmd_name = commands[i] - sub_cmd = self.reinitialize_command(cmd_name) - if cmd_name not in self.no_format_option: - sub_cmd.format = self.formats[i] - - # passing the owner and group names for tar archiving - if cmd_name == 'bdist_dumb': - sub_cmd.owner = self.owner - sub_cmd.group = self.group - - # If we're going to need to run this command again, tell it to - # keep its temporary files around so subsequent runs go faster. - if cmd_name in commands[i+1:]: - sub_cmd.keep_temp = 1 - self.run_command(cmd_name) diff --git a/Lib/distutils/command/bdist_dumb.py b/Lib/distutils/command/bdist_dumb.py deleted file mode 100644 index f0d6b5b8cd8ab3..00000000000000 --- a/Lib/distutils/command/bdist_dumb.py +++ /dev/null @@ -1,123 +0,0 @@ -"""distutils.command.bdist_dumb - -Implements the Distutils 'bdist_dumb' command (create a "dumb" built -distribution -- i.e., just an archive to be unpacked under $prefix or -$exec_prefix).""" - -import os -from distutils.core import Command -from distutils.util import get_platform -from distutils.dir_util import remove_tree, ensure_relative -from distutils.errors import * -from distutils.sysconfig import get_python_version -from distutils import log - -class bdist_dumb(Command): - - description = "create a \"dumb\" built distribution" - - user_options = [('bdist-dir=', 'd', - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_platform()), - ('format=', 'f', - "archive format to create (tar, gztar, bztar, xztar, " - "ztar, zip)"), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('relative', None, - "build the archive using relative paths " - "(default: false)"), - ('owner=', 'u', - "Owner name used when creating a tar file" - " [default: current user]"), - ('group=', 'g', - "Group name used when creating a tar file" - " [default: current group]"), - ] - - boolean_options = ['keep-temp', 'skip-build', 'relative'] - - default_format = { 'posix': 'gztar', - 'nt': 'zip' } - - def initialize_options(self): - self.bdist_dir = None - self.plat_name = None - self.format = None - self.keep_temp = 0 - self.dist_dir = None - self.skip_build = None - self.relative = 0 - self.owner = None - self.group = None - - def finalize_options(self): - if self.bdist_dir is None: - bdist_base = self.get_finalized_command('bdist').bdist_base - self.bdist_dir = os.path.join(bdist_base, 'dumb') - - if self.format is None: - try: - self.format = self.default_format[os.name] - except KeyError: - raise DistutilsPlatformError( - "don't know how to create dumb built distributions " - "on platform %s" % os.name) - - self.set_undefined_options('bdist', - ('dist_dir', 'dist_dir'), - ('plat_name', 'plat_name'), - ('skip_build', 'skip_build')) - - def run(self): - if not self.skip_build: - self.run_command('build') - - install = self.reinitialize_command('install', reinit_subcommands=1) - install.root = self.bdist_dir - install.skip_build = self.skip_build - install.warn_dir = 0 - - log.info("installing to %s", self.bdist_dir) - self.run_command('install') - - # And make an archive relative to the root of the - # pseudo-installation tree. 
- archive_basename = "%s.%s" % (self.distribution.get_fullname(), - self.plat_name) - - pseudoinstall_root = os.path.join(self.dist_dir, archive_basename) - if not self.relative: - archive_root = self.bdist_dir - else: - if (self.distribution.has_ext_modules() and - (install.install_base != install.install_platbase)): - raise DistutilsPlatformError( - "can't make a dumb built distribution where " - "base and platbase are different (%s, %s)" - % (repr(install.install_base), - repr(install.install_platbase))) - else: - archive_root = os.path.join(self.bdist_dir, - ensure_relative(install.install_base)) - - # Make the archive - filename = self.make_archive(pseudoinstall_root, - self.format, root_dir=archive_root, - owner=self.owner, group=self.group) - if self.distribution.has_ext_modules(): - pyversion = get_python_version() - else: - pyversion = 'any' - self.distribution.dist_files.append(('bdist_dumb', pyversion, - filename)) - - if not self.keep_temp: - remove_tree(self.bdist_dir, dry_run=self.dry_run) diff --git a/Lib/distutils/command/bdist_rpm.py b/Lib/distutils/command/bdist_rpm.py deleted file mode 100644 index 550cbfa1e28a23..00000000000000 --- a/Lib/distutils/command/bdist_rpm.py +++ /dev/null @@ -1,579 +0,0 @@ -"""distutils.command.bdist_rpm - -Implements the Distutils 'bdist_rpm' command (create RPM source and binary -distributions).""" - -import subprocess, sys, os -from distutils.core import Command -from distutils.debug import DEBUG -from distutils.file_util import write_file -from distutils.errors import * -from distutils.sysconfig import get_python_version -from distutils import log - -class bdist_rpm(Command): - - description = "create an RPM distribution" - - user_options = [ - ('bdist-base=', None, - "base directory for creating built distributions"), - ('rpm-base=', None, - "base directory for creating RPMs (defaults to \"rpm\" under " - "--bdist-base; must be specified for RPM 2)"), - ('dist-dir=', 'd', - "directory to put final RPM files in " - "(and .spec files if --spec-only)"), - ('python=', None, - "path to Python interpreter to hard-code in the .spec file " - "(default: \"python\")"), - ('fix-python', None, - "hard-code the exact path to the current Python interpreter in " - "the .spec file"), - ('spec-only', None, - "only regenerate spec file"), - ('source-only', None, - "only generate source RPM"), - ('binary-only', None, - "only generate binary RPM"), - ('use-bzip2', None, - "use bzip2 instead of gzip to create source distribution"), - - # More meta-data: too RPM-specific to put in the setup script, - # but needs to go in the .spec file -- so we make these options - # to "bdist_rpm". The idea is that packagers would put this - # info in setup.cfg, although they are of course free to - # supply it on the command line. - ('distribution-name=', None, - "name of the (Linux) distribution to which this " - "RPM applies (*not* the name of the module distribution!)"), - ('group=', None, - "package classification [default: \"Development/Libraries\"]"), - ('release=', None, - "RPM release number"), - ('serial=', None, - "RPM serial number"), - ('vendor=', None, - "RPM \"vendor\" (eg. \"Joe Blow <joe@example.com>\") " - "[default: maintainer or author from setup script]"), - ('packager=', None, - "RPM packager (eg. 
\"Jane Doe <jane@example.net>\") " - "[default: vendor]"), - ('doc-files=', None, - "list of documentation files (space or comma-separated)"), - ('changelog=', None, - "RPM changelog"), - ('icon=', None, - "name of icon file"), - ('provides=', None, - "capabilities provided by this package"), - ('requires=', None, - "capabilities required by this package"), - ('conflicts=', None, - "capabilities which conflict with this package"), - ('build-requires=', None, - "capabilities required to build this package"), - ('obsoletes=', None, - "capabilities made obsolete by this package"), - ('no-autoreq', None, - "do not automatically calculate dependencies"), - - # Actions to take when building RPM - ('keep-temp', 'k', - "don't clean up RPM build directory"), - ('no-keep-temp', None, - "clean up RPM build directory [default]"), - ('use-rpm-opt-flags', None, - "compile with RPM_OPT_FLAGS when building from source RPM"), - ('no-rpm-opt-flags', None, - "do not pass any RPM CFLAGS to compiler"), - ('rpm3-mode', None, - "RPM 3 compatibility mode (default)"), - ('rpm2-mode', None, - "RPM 2 compatibility mode"), - - # Add the hooks necessary for specifying custom scripts - ('prep-script=', None, - "Specify a script for the PREP phase of RPM building"), - ('build-script=', None, - "Specify a script for the BUILD phase of RPM building"), - - ('pre-install=', None, - "Specify a script for the pre-INSTALL phase of RPM building"), - ('install-script=', None, - "Specify a script for the INSTALL phase of RPM building"), - ('post-install=', None, - "Specify a script for the post-INSTALL phase of RPM building"), - - ('pre-uninstall=', None, - "Specify a script for the pre-UNINSTALL phase of RPM building"), - ('post-uninstall=', None, - "Specify a script for the post-UNINSTALL phase of RPM building"), - - ('clean-script=', None, - "Specify a script for the CLEAN phase of RPM building"), - - ('verify-script=', None, - "Specify a script for the VERIFY phase of the RPM build"), - - # Allow a packager to explicitly force an architecture - ('force-arch=', None, - "Force an architecture onto the RPM build process"), - - ('quiet', 'q', - "Run the INSTALL phase of RPM building in quiet mode"), - ] - - boolean_options = ['keep-temp', 'use-rpm-opt-flags', 'rpm3-mode', - 'no-autoreq', 'quiet'] - - negative_opt = {'no-keep-temp': 'keep-temp', - 'no-rpm-opt-flags': 'use-rpm-opt-flags', - 'rpm2-mode': 'rpm3-mode'} - - - def initialize_options(self): - self.bdist_base = None - self.rpm_base = None - self.dist_dir = None - self.python = None - self.fix_python = None - self.spec_only = None - self.binary_only = None - self.source_only = None - self.use_bzip2 = None - - self.distribution_name = None - self.group = None - self.release = None - self.serial = None - self.vendor = None - self.packager = None - self.doc_files = None - self.changelog = None - self.icon = None - - self.prep_script = None - self.build_script = None - self.install_script = None - self.clean_script = None - self.verify_script = None - self.pre_install = None - self.post_install = None - self.pre_uninstall = None - self.post_uninstall = None - self.prep = None - self.provides = None - self.requires = None - self.conflicts = None - self.build_requires = None - self.obsoletes = None - - self.keep_temp = 0 - self.use_rpm_opt_flags = 1 - self.rpm3_mode = 1 - self.no_autoreq = 0 - - self.force_arch = None - self.quiet = 0 - - def finalize_options(self): - self.set_undefined_options('bdist', ('bdist_base', 'bdist_base')) - if self.rpm_base is None: - if not 
self.rpm3_mode: - raise DistutilsOptionError( - "you must specify --rpm-base in RPM 2 mode") - self.rpm_base = os.path.join(self.bdist_base, "rpm") - - if self.python is None: - if self.fix_python: - self.python = sys.executable - else: - self.python = "python3" - elif self.fix_python: - raise DistutilsOptionError( - "--python and --fix-python are mutually exclusive options") - - if os.name != 'posix': - raise DistutilsPlatformError("don't know how to create RPM " - "distributions on platform %s" % os.name) - if self.binary_only and self.source_only: - raise DistutilsOptionError( - "cannot supply both '--source-only' and '--binary-only'") - - # don't pass CFLAGS to pure python distributions - if not self.distribution.has_ext_modules(): - self.use_rpm_opt_flags = 0 - - self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) - self.finalize_package_data() - - def finalize_package_data(self): - self.ensure_string('group', "Development/Libraries") - self.ensure_string('vendor', - "%s <%s>" % (self.distribution.get_contact(), - self.distribution.get_contact_email())) - self.ensure_string('packager') - self.ensure_string_list('doc_files') - if isinstance(self.doc_files, list): - for readme in ('README', 'README.txt'): - if os.path.exists(readme) and readme not in self.doc_files: - self.doc_files.append(readme) - - self.ensure_string('release', "1") - self.ensure_string('serial') # should it be an int? - - self.ensure_string('distribution_name') - - self.ensure_string('changelog') - # Format changelog correctly - self.changelog = self._format_changelog(self.changelog) - - self.ensure_filename('icon') - - self.ensure_filename('prep_script') - self.ensure_filename('build_script') - self.ensure_filename('install_script') - self.ensure_filename('clean_script') - self.ensure_filename('verify_script') - self.ensure_filename('pre_install') - self.ensure_filename('post_install') - self.ensure_filename('pre_uninstall') - self.ensure_filename('post_uninstall') - - # XXX don't forget we punted on summaries and descriptions -- they - # should be handled here eventually! - - # Now *this* is some meta-data that belongs in the setup script... - self.ensure_string_list('provides') - self.ensure_string_list('requires') - self.ensure_string_list('conflicts') - self.ensure_string_list('build_requires') - self.ensure_string_list('obsoletes') - - self.ensure_string('force_arch') - - def run(self): - if DEBUG: - print("before _get_package_data():") - print("vendor =", self.vendor) - print("packager =", self.packager) - print("doc_files =", self.doc_files) - print("changelog =", self.changelog) - - # make directories - if self.spec_only: - spec_dir = self.dist_dir - self.mkpath(spec_dir) - else: - rpm_dir = {} - for d in ('SOURCES', 'SPECS', 'BUILD', 'RPMS', 'SRPMS'): - rpm_dir[d] = os.path.join(self.rpm_base, d) - self.mkpath(rpm_dir[d]) - spec_dir = rpm_dir['SPECS'] - - # Spec file goes into 'dist_dir' if '--spec-only specified', - # build/rpm.<plat> otherwise. - spec_path = os.path.join(spec_dir, - "%s.spec" % self.distribution.get_name()) - self.execute(write_file, - (spec_path, - self._make_spec_file()), - "writing '%s'" % spec_path) - - if self.spec_only: # stop if requested - return - - # Make a source distribution and copy to SOURCES directory with - # optional icon. 
- saved_dist_files = self.distribution.dist_files[:] - sdist = self.reinitialize_command('sdist') - if self.use_bzip2: - sdist.formats = ['bztar'] - else: - sdist.formats = ['gztar'] - self.run_command('sdist') - self.distribution.dist_files = saved_dist_files - - source = sdist.get_archive_files()[0] - source_dir = rpm_dir['SOURCES'] - self.copy_file(source, source_dir) - - if self.icon: - if os.path.exists(self.icon): - self.copy_file(self.icon, source_dir) - else: - raise DistutilsFileError( - "icon file '%s' does not exist" % self.icon) - - # build package - log.info("building RPMs") - rpm_cmd = ['rpmbuild'] - - if self.source_only: # what kind of RPMs? - rpm_cmd.append('-bs') - elif self.binary_only: - rpm_cmd.append('-bb') - else: - rpm_cmd.append('-ba') - rpm_cmd.extend(['--define', '__python %s' % self.python]) - if self.rpm3_mode: - rpm_cmd.extend(['--define', - '_topdir %s' % os.path.abspath(self.rpm_base)]) - if not self.keep_temp: - rpm_cmd.append('--clean') - - if self.quiet: - rpm_cmd.append('--quiet') - - rpm_cmd.append(spec_path) - # Determine the binary rpm names that should be built out of this spec - # file - # Note that some of these may not be really built (if the file - # list is empty) - nvr_string = "%{name}-%{version}-%{release}" - src_rpm = nvr_string + ".src.rpm" - non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm" - q_cmd = r"rpm -q --qf '%s %s\n' --specfile '%s'" % ( - src_rpm, non_src_rpm, spec_path) - - out = os.popen(q_cmd) - try: - binary_rpms = [] - source_rpm = None - while True: - line = out.readline() - if not line: - break - l = line.strip().split() - assert(len(l) == 2) - binary_rpms.append(l[1]) - # The source rpm is named after the first entry in the spec file - if source_rpm is None: - source_rpm = l[0] - - status = out.close() - if status: - raise DistutilsExecError("Failed to execute: %s" % repr(q_cmd)) - - finally: - out.close() - - self.spawn(rpm_cmd) - - if not self.dry_run: - if self.distribution.has_ext_modules(): - pyversion = get_python_version() - else: - pyversion = 'any' - - if not self.binary_only: - srpm = os.path.join(rpm_dir['SRPMS'], source_rpm) - assert(os.path.exists(srpm)) - self.move_file(srpm, self.dist_dir) - filename = os.path.join(self.dist_dir, source_rpm) - self.distribution.dist_files.append( - ('bdist_rpm', pyversion, filename)) - - if not self.source_only: - for rpm in binary_rpms: - rpm = os.path.join(rpm_dir['RPMS'], rpm) - if os.path.exists(rpm): - self.move_file(rpm, self.dist_dir) - filename = os.path.join(self.dist_dir, - os.path.basename(rpm)) - self.distribution.dist_files.append( - ('bdist_rpm', pyversion, filename)) - - def _dist_path(self, path): - return os.path.join(self.dist_dir, os.path.basename(path)) - - def _make_spec_file(self): - """Generate the text of an RPM spec file and return it as a - list of strings (one per line). 
- """ - # definitions and headers - spec_file = [ - '%define name ' + self.distribution.get_name(), - '%define version ' + self.distribution.get_version().replace('-','_'), - '%define unmangled_version ' + self.distribution.get_version(), - '%define release ' + self.release.replace('-','_'), - '', - 'Summary: ' + self.distribution.get_description(), - ] - - # Workaround for #14443 which affects some RPM based systems such as - # RHEL6 (and probably derivatives) - vendor_hook = subprocess.getoutput('rpm --eval %{__os_install_post}') - # Generate a potential replacement value for __os_install_post (whilst - # normalizing the whitespace to simplify the test for whether the - # invocation of brp-python-bytecompile passes in __python): - vendor_hook = '\n'.join([' %s \\' % line.strip() - for line in vendor_hook.splitlines()]) - problem = "brp-python-bytecompile \\\n" - fixed = "brp-python-bytecompile %{__python} \\\n" - fixed_hook = vendor_hook.replace(problem, fixed) - if fixed_hook != vendor_hook: - spec_file.append('# Workaround for http://bugs.python.org/issue14443') - spec_file.append('%define __os_install_post ' + fixed_hook + '\n') - - # put locale summaries into spec file - # XXX not supported for now (hard to put a dictionary - # in a config file -- arg!) - #for locale in self.summaries.keys(): - # spec_file.append('Summary(%s): %s' % (locale, - # self.summaries[locale])) - - spec_file.extend([ - 'Name: %{name}', - 'Version: %{version}', - 'Release: %{release}',]) - - # XXX yuck! this filename is available from the "sdist" command, - # but only after it has run: and we create the spec file before - # running "sdist", in case of --spec-only. - if self.use_bzip2: - spec_file.append('Source0: %{name}-%{unmangled_version}.tar.bz2') - else: - spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz') - - spec_file.extend([ - 'License: ' + self.distribution.get_license(), - 'Group: ' + self.group, - 'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot', - 'Prefix: %{_prefix}', ]) - - if not self.force_arch: - # noarch if no extension modules - if not self.distribution.has_ext_modules(): - spec_file.append('BuildArch: noarch') - else: - spec_file.append( 'BuildArch: %s' % self.force_arch ) - - for field in ('Vendor', - 'Packager', - 'Provides', - 'Requires', - 'Conflicts', - 'Obsoletes', - ): - val = getattr(self, field.lower()) - if isinstance(val, list): - spec_file.append('%s: %s' % (field, ' '.join(val))) - elif val is not None: - spec_file.append('%s: %s' % (field, val)) - - - if self.distribution.get_url() != 'UNKNOWN': - spec_file.append('Url: ' + self.distribution.get_url()) - - if self.distribution_name: - spec_file.append('Distribution: ' + self.distribution_name) - - if self.build_requires: - spec_file.append('BuildRequires: ' + - ' '.join(self.build_requires)) - - if self.icon: - spec_file.append('Icon: ' + os.path.basename(self.icon)) - - if self.no_autoreq: - spec_file.append('AutoReq: 0') - - spec_file.extend([ - '', - '%description', - self.distribution.get_long_description() - ]) - - # put locale descriptions into spec file - # XXX again, suppressed because config file syntax doesn't - # easily support this ;-( - #for locale in self.descriptions.keys(): - # spec_file.extend([ - # '', - # '%description -l ' + locale, - # self.descriptions[locale], - # ]) - - # rpm scripts - # figure out default build script - def_setup_call = "%s %s" % (self.python,os.path.basename(sys.argv[0])) - def_build = "%s build" % def_setup_call - if self.use_rpm_opt_flags: - def_build 
= 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build - - # insert contents of files - - # XXX this is kind of misleading: user-supplied options are files - # that we open and interpolate into the spec file, but the defaults - # are just text that we drop in as-is. Hmmm. - - install_cmd = ('%s install -O1 --root=$RPM_BUILD_ROOT ' - '--record=INSTALLED_FILES') % def_setup_call - - script_options = [ - ('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"), - ('build', 'build_script', def_build), - ('install', 'install_script', install_cmd), - ('clean', 'clean_script', "rm -rf $RPM_BUILD_ROOT"), - ('verifyscript', 'verify_script', None), - ('pre', 'pre_install', None), - ('post', 'post_install', None), - ('preun', 'pre_uninstall', None), - ('postun', 'post_uninstall', None), - ] - - for (rpm_opt, attr, default) in script_options: - # Insert contents of file referred to, if no file is referred to - # use 'default' as contents of script - val = getattr(self, attr) - if val or default: - spec_file.extend([ - '', - '%' + rpm_opt,]) - if val: - with open(val) as f: - spec_file.extend(f.read().split('\n')) - else: - spec_file.append(default) - - - # files section - spec_file.extend([ - '', - '%files -f INSTALLED_FILES', - '%defattr(-,root,root)', - ]) - - if self.doc_files: - spec_file.append('%doc ' + ' '.join(self.doc_files)) - - if self.changelog: - spec_file.extend([ - '', - '%changelog',]) - spec_file.extend(self.changelog) - - return spec_file - - def _format_changelog(self, changelog): - """Format the changelog correctly and convert it to a list of strings - """ - if not changelog: - return changelog - new_changelog = [] - for line in changelog.strip().split('\n'): - line = line.strip() - if line[0] == '*': - new_changelog.extend(['', line]) - elif line[0] == '-': - new_changelog.append(line) - else: - new_changelog.append(' ' + line) - - # strip trailing newline inserted by first changelog entry - if not new_changelog[0]: - del new_changelog[0] - - return new_changelog diff --git a/Lib/distutils/command/build.py b/Lib/distutils/command/build.py deleted file mode 100644 index a86df0bc7f9218..00000000000000 --- a/Lib/distutils/command/build.py +++ /dev/null @@ -1,157 +0,0 @@ -"""distutils.command.build - -Implements the Distutils 'build' command.""" - -import sys, os -from distutils.core import Command -from distutils.errors import DistutilsOptionError -from distutils.util import get_platform - - -def show_compilers(): - from distutils.ccompiler import show_compilers - show_compilers() - - -class build(Command): - - description = "build everything needed to install" - - user_options = [ - ('build-base=', 'b', - "base directory for build library"), - ('build-purelib=', None, - "build directory for platform-neutral distributions"), - ('build-platlib=', None, - "build directory for platform-specific distributions"), - ('build-lib=', None, - "build directory for all distribution (defaults to either " + - "build-purelib or build-platlib"), - ('build-scripts=', None, - "build directory for scripts"), - ('build-temp=', 't', - "temporary build directory"), - ('plat-name=', 'p', - "platform name to build for, if supported " - "(default: %s)" % get_platform()), - ('compiler=', 'c', - "specify the compiler type"), - ('parallel=', 'j', - "number of parallel build jobs"), - ('debug', 'g', - "compile extensions and libraries with debugging information"), - ('force', 'f', - "forcibly build everything (ignore file timestamps)"), - ('executable=', 'e', - "specify final destination interpreter path 
(build.py)"), - ] - - boolean_options = ['debug', 'force'] - - help_options = [ - ('help-compiler', None, - "list available compilers", show_compilers), - ] - - def initialize_options(self): - self.build_base = 'build' - # these are decided only after 'build_base' has its final value - # (unless overridden by the user or client) - self.build_purelib = None - self.build_platlib = None - self.build_lib = None - self.build_temp = None - self.build_scripts = None - self.compiler = None - self.plat_name = None - self.debug = None - self.force = 0 - self.executable = None - self.parallel = None - - def finalize_options(self): - if self.plat_name is None: - self.plat_name = get_platform() - else: - # plat-name only supported for windows (other platforms are - # supported via ./configure flags, if at all). Avoid misleading - # other platforms. - if os.name != 'nt': - raise DistutilsOptionError( - "--plat-name only supported on Windows (try " - "using './configure --help' on your platform)") - - plat_specifier = ".%s-%d.%d" % (self.plat_name, *sys.version_info[:2]) - - # Make it so Python 2.x and Python 2.x with --with-pydebug don't - # share the same build directories. Doing so confuses the build - # process for C modules - if hasattr(sys, 'gettotalrefcount'): - plat_specifier += '-pydebug' - - # 'build_purelib' and 'build_platlib' just default to 'lib' and - # 'lib.<plat>' under the base build directory. We only use one of - # them for a given distribution, though -- - if self.build_purelib is None: - self.build_purelib = os.path.join(self.build_base, 'lib') - if self.build_platlib is None: - self.build_platlib = os.path.join(self.build_base, - 'lib' + plat_specifier) - - # 'build_lib' is the actual directory that we will use for this - # particular module distribution -- if user didn't supply it, pick - # one of 'build_purelib' or 'build_platlib'. - if self.build_lib is None: - if self.distribution.ext_modules: - self.build_lib = self.build_platlib - else: - self.build_lib = self.build_purelib - - # 'build_temp' -- temporary directory for compiler turds, - # "build/temp.<plat>" - if self.build_temp is None: - self.build_temp = os.path.join(self.build_base, - 'temp' + plat_specifier) - if self.build_scripts is None: - self.build_scripts = os.path.join(self.build_base, - 'scripts-%d.%d' % sys.version_info[:2]) - - if self.executable is None and sys.executable: - self.executable = os.path.normpath(sys.executable) - - if isinstance(self.parallel, str): - try: - self.parallel = int(self.parallel) - except ValueError: - raise DistutilsOptionError("parallel should be an integer") - - def run(self): - # Run all relevant sub-commands. 
This will be some subset of: - # - build_py - pure Python modules - # - build_clib - standalone C libraries - # - build_ext - Python extensions - # - build_scripts - (Python) scripts - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - - # -- Predicates for the sub-command list --------------------------- - - def has_pure_modules(self): - return self.distribution.has_pure_modules() - - def has_c_libraries(self): - return self.distribution.has_c_libraries() - - def has_ext_modules(self): - return self.distribution.has_ext_modules() - - def has_scripts(self): - return self.distribution.has_scripts() - - - sub_commands = [('build_py', has_pure_modules), - ('build_clib', has_c_libraries), - ('build_ext', has_ext_modules), - ('build_scripts', has_scripts), - ] diff --git a/Lib/distutils/command/build_clib.py b/Lib/distutils/command/build_clib.py deleted file mode 100644 index 3e20ef23cd81e0..00000000000000 --- a/Lib/distutils/command/build_clib.py +++ /dev/null @@ -1,209 +0,0 @@ -"""distutils.command.build_clib - -Implements the Distutils 'build_clib' command, to build a C/C++ library -that is included in the module distribution and needed by an extension -module.""" - - -# XXX this module has *lots* of code ripped-off quite transparently from -# build_ext.py -- not surprisingly really, as the work required to build -# a static library from a collection of C source files is not really all -# that different from what's required to build a shared object file from -# a collection of C source files. Nevertheless, I haven't done the -# necessary refactoring to account for the overlap in code between the -# two modules, mainly because a number of subtle details changed in the -# cut 'n paste. Sigh. - -import os -from distutils.core import Command -from distutils.errors import * -from distutils.sysconfig import customize_compiler -from distutils import log - -def show_compilers(): - from distutils.ccompiler import show_compilers - show_compilers() - - -class build_clib(Command): - - description = "build C/C++ libraries used by Python extensions" - - user_options = [ - ('build-clib=', 'b', - "directory to build C/C++ libraries to"), - ('build-temp=', 't', - "directory to put temporary build by-products"), - ('debug', 'g', - "compile with debugging information"), - ('force', 'f', - "forcibly build everything (ignore file timestamps)"), - ('compiler=', 'c', - "specify the compiler type"), - ] - - boolean_options = ['debug', 'force'] - - help_options = [ - ('help-compiler', None, - "list available compilers", show_compilers), - ] - - def initialize_options(self): - self.build_clib = None - self.build_temp = None - - # List of libraries to build - self.libraries = None - - # Compilation options for all libraries - self.include_dirs = None - self.define = None - self.undef = None - self.debug = None - self.force = 0 - self.compiler = None - - - def finalize_options(self): - # This might be confusing: both build-clib and build-temp default - # to build-temp as defined by the "build" command. This is because - # I think that C libraries are really just temporary build - # by-products, at least from the point of view of building Python - # extensions -- but I want to keep my options open. 
- self.set_undefined_options('build', - ('build_temp', 'build_clib'), - ('build_temp', 'build_temp'), - ('compiler', 'compiler'), - ('debug', 'debug'), - ('force', 'force')) - - self.libraries = self.distribution.libraries - if self.libraries: - self.check_library_list(self.libraries) - - if self.include_dirs is None: - self.include_dirs = self.distribution.include_dirs or [] - if isinstance(self.include_dirs, str): - self.include_dirs = self.include_dirs.split(os.pathsep) - - # XXX same as for build_ext -- what about 'self.define' and - # 'self.undef' ? - - - def run(self): - if not self.libraries: - return - - # Yech -- this is cut 'n pasted from build_ext.py! - from distutils.ccompiler import new_compiler - self.compiler = new_compiler(compiler=self.compiler, - dry_run=self.dry_run, - force=self.force) - customize_compiler(self.compiler) - - if self.include_dirs is not None: - self.compiler.set_include_dirs(self.include_dirs) - if self.define is not None: - # 'define' option is a list of (name,value) tuples - for (name,value) in self.define: - self.compiler.define_macro(name, value) - if self.undef is not None: - for macro in self.undef: - self.compiler.undefine_macro(macro) - - self.build_libraries(self.libraries) - - - def check_library_list(self, libraries): - """Ensure that the list of libraries is valid. - - `library` is presumably provided as a command option 'libraries'. - This method checks that it is a list of 2-tuples, where the tuples - are (library_name, build_info_dict). - - Raise DistutilsSetupError if the structure is invalid anywhere; - just returns otherwise. - """ - if not isinstance(libraries, list): - raise DistutilsSetupError( - "'libraries' option must be a list of tuples") - - for lib in libraries: - if not isinstance(lib, tuple) and len(lib) != 2: - raise DistutilsSetupError( - "each element of 'libraries' must a 2-tuple") - - name, build_info = lib - - if not isinstance(name, str): - raise DistutilsSetupError( - "first element of each tuple in 'libraries' " - "must be a string (the library name)") - - if '/' in name or (os.sep != '/' and os.sep in name): - raise DistutilsSetupError("bad library name '%s': " - "may not contain directory separators" % lib[0]) - - if not isinstance(build_info, dict): - raise DistutilsSetupError( - "second element of each tuple in 'libraries' " - "must be a dictionary (build info)") - - - def get_library_names(self): - # Assume the library list is valid -- 'check_library_list()' is - # called from 'finalize_options()', so it should be! 
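For reference, a hypothetical example of the structure that check_library_list() above accepts for the 'libraries' option -- a list of (name, build_info) 2-tuples where the name is a plain string and the build info is a dict with at least a 'sources' list. The 'sample' library and its paths are made up.

libraries = [
    ('sample',                        # plain library name, no path separators
     {'sources': ['src/sample.c'],    # required list of C source files
      'macros': [('NDEBUG', '1')],    # optional (name, value) macro tuples
      'include_dirs': ['include']}),
]

for name, build_info in libraries:
    assert isinstance(name, str)
    assert isinstance(build_info, dict)
    assert isinstance(build_info.get('sources'), (list, tuple))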
- if not self.libraries: - return None - - lib_names = [] - for (lib_name, build_info) in self.libraries: - lib_names.append(lib_name) - return lib_names - - - def get_source_files(self): - self.check_library_list(self.libraries) - filenames = [] - for (lib_name, build_info) in self.libraries: - sources = build_info.get('sources') - if sources is None or not isinstance(sources, (list, tuple)): - raise DistutilsSetupError( - "in 'libraries' option (library '%s'), " - "'sources' must be present and must be " - "a list of source filenames" % lib_name) - - filenames.extend(sources) - return filenames - - - def build_libraries(self, libraries): - for (lib_name, build_info) in libraries: - sources = build_info.get('sources') - if sources is None or not isinstance(sources, (list, tuple)): - raise DistutilsSetupError( - "in 'libraries' option (library '%s'), " - "'sources' must be present and must be " - "a list of source filenames" % lib_name) - sources = list(sources) - - log.info("building '%s' library", lib_name) - - # First, compile the source code to object files in the library - # directory. (This should probably change to putting object - # files in a temporary build directory.) - macros = build_info.get('macros') - include_dirs = build_info.get('include_dirs') - objects = self.compiler.compile(sources, - output_dir=self.build_temp, - macros=macros, - include_dirs=include_dirs, - debug=self.debug) - - # Now "link" the object files together into a static library. - # (On Unix at least, this isn't really linking -- it just - # builds an archive. Whatever.) - self.compiler.create_static_lib(objects, lib_name, - output_dir=self.build_clib, - debug=self.debug) diff --git a/Lib/distutils/command/build_ext.py b/Lib/distutils/command/build_ext.py deleted file mode 100644 index f287b349984ff0..00000000000000 --- a/Lib/distutils/command/build_ext.py +++ /dev/null @@ -1,754 +0,0 @@ -"""distutils.command.build_ext - -Implements the Distutils 'build_ext' command, for building extension -modules (currently limited to C extensions, should accommodate C++ -extensions ASAP).""" - -import contextlib -import os -import re -import sys -from distutils.core import Command -from distutils.errors import * -from distutils.sysconfig import customize_compiler, get_python_version -from distutils.sysconfig import get_config_h_filename -from distutils.dep_util import newer_group -from distutils.extension import Extension -from distutils.util import get_platform -from distutils import log - -from site import USER_BASE - -# An extension name is just a dot-separated list of Python NAMEs (ie. -# the same as a fully-qualified module name). -extension_name_re = re.compile \ - (r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$') - - -def show_compilers (): - from distutils.ccompiler import show_compilers - show_compilers() - - -class build_ext(Command): - - description = "build C/C++ extensions (compile/link to build directory)" - - # XXX thoughts on how to deal with complex command-line options like - # these, i.e. how to make it so fancy_getopt can suck them off the - # command line and make it look like setup.py defined the appropriate - # lists of tuples of what-have-you. 
- # - each command needs a callback to process its command-line options - # - Command.__init__() needs access to its share of the whole - # command line (must ultimately come from - # Distribution.parse_command_line()) - # - it then calls the current command class' option-parsing - # callback to deal with weird options like -D, which have to - # parse the option text and churn out some custom data - # structure - # - that data structure (in this case, a list of 2-tuples) - # will then be present in the command object by the time - # we get to finalize_options() (i.e. the constructor - # takes care of both command-line and client options - # in between initialize_options() and finalize_options()) - - sep_by = " (separated by '%s')" % os.pathsep - user_options = [ - ('build-lib=', 'b', - "directory for compiled extension modules"), - ('build-temp=', 't', - "directory for temporary files (build by-products)"), - ('plat-name=', 'p', - "platform name to cross-compile for, if supported " - "(default: %s)" % get_platform()), - ('inplace', 'i', - "ignore build-lib and put compiled extensions into the source " + - "directory alongside your pure Python modules"), - ('include-dirs=', 'I', - "list of directories to search for header files" + sep_by), - ('define=', 'D', - "C preprocessor macros to define"), - ('undef=', 'U', - "C preprocessor macros to undefine"), - ('libraries=', 'l', - "external C libraries to link with"), - ('library-dirs=', 'L', - "directories to search for external C libraries" + sep_by), - ('rpath=', 'R', - "directories to search for shared C libraries at runtime"), - ('link-objects=', 'O', - "extra explicit link objects to include in the link"), - ('debug', 'g', - "compile/link with debugging information"), - ('force', 'f', - "forcibly build everything (ignore file timestamps)"), - ('compiler=', 'c', - "specify the compiler type"), - ('parallel=', 'j', - "number of parallel build jobs"), - ('swig-cpp', None, - "make SWIG create C++ files (default is C)"), - ('swig-opts=', None, - "list of SWIG command line options"), - ('swig=', None, - "path to the SWIG executable"), - ('user', None, - "add user include, library and rpath") - ] - - boolean_options = ['inplace', 'debug', 'force', 'swig-cpp', 'user'] - - help_options = [ - ('help-compiler', None, - "list available compilers", show_compilers), - ] - - def initialize_options(self): - self.extensions = None - self.build_lib = None - self.plat_name = None - self.build_temp = None - self.inplace = 0 - self.package = None - - self.include_dirs = None - self.define = None - self.undef = None - self.libraries = None - self.library_dirs = None - self.rpath = None - self.link_objects = None - self.debug = None - self.force = None - self.compiler = None - self.swig = None - self.swig_cpp = None - self.swig_opts = None - self.user = None - self.parallel = None - - def finalize_options(self): - from distutils import sysconfig - - self.set_undefined_options('build', - ('build_lib', 'build_lib'), - ('build_temp', 'build_temp'), - ('compiler', 'compiler'), - ('debug', 'debug'), - ('force', 'force'), - ('parallel', 'parallel'), - ('plat_name', 'plat_name'), - ) - - if self.package is None: - self.package = self.distribution.ext_package - - self.extensions = self.distribution.ext_modules - - # Make sure Python's include directories (for Python.h, pyconfig.h, - # etc.) are in the include search path. 
- py_include = sysconfig.get_python_inc() - plat_py_include = sysconfig.get_python_inc(plat_specific=1) - if self.include_dirs is None: - self.include_dirs = self.distribution.include_dirs or [] - if isinstance(self.include_dirs, str): - self.include_dirs = self.include_dirs.split(os.pathsep) - - # If in a virtualenv, add its include directory - # Issue 16116 - if sys.exec_prefix != sys.base_exec_prefix: - self.include_dirs.append(os.path.join(sys.exec_prefix, 'include')) - - # Put the Python "system" include dir at the end, so that - # any local include dirs take precedence. - self.include_dirs.extend(py_include.split(os.path.pathsep)) - if plat_py_include != py_include: - self.include_dirs.extend( - plat_py_include.split(os.path.pathsep)) - - self.ensure_string_list('libraries') - self.ensure_string_list('link_objects') - - # Life is easier if we're not forever checking for None, so - # simplify these options to empty lists if unset - if self.libraries is None: - self.libraries = [] - if self.library_dirs is None: - self.library_dirs = [] - elif isinstance(self.library_dirs, str): - self.library_dirs = self.library_dirs.split(os.pathsep) - - if self.rpath is None: - self.rpath = [] - elif isinstance(self.rpath, str): - self.rpath = self.rpath.split(os.pathsep) - - # for extensions under windows use different directories - # for Release and Debug builds. - # also Python's library directory must be appended to library_dirs - if os.name == 'nt': - # the 'libs' directory is for binary installs - we assume that - # must be the *native* platform. But we don't really support - # cross-compiling via a binary install anyway, so we let it go. - self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs')) - if sys.base_exec_prefix != sys.prefix: # Issue 16116 - self.library_dirs.append(os.path.join(sys.base_exec_prefix, 'libs')) - if self.debug: - self.build_temp = os.path.join(self.build_temp, "Debug") - else: - self.build_temp = os.path.join(self.build_temp, "Release") - - # Append the source distribution include and library directories, - # this allows distutils on windows to work in the source tree - self.include_dirs.append(os.path.dirname(get_config_h_filename())) - _sys_home = getattr(sys, '_home', None) - if _sys_home: - self.library_dirs.append(_sys_home) - - # Use the .lib files for the correct architecture - if self.plat_name == 'win32': - suffix = 'win32' - else: - # win-amd64 - suffix = self.plat_name[4:] - new_lib = os.path.join(sys.exec_prefix, 'PCbuild') - if suffix: - new_lib = os.path.join(new_lib, suffix) - self.library_dirs.append(new_lib) - - # For extensions under Cygwin, Python's library directory must be - # appended to library_dirs - if sys.platform[:6] == 'cygwin': - if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")): - # building third party extensions - self.library_dirs.append(os.path.join(sys.prefix, "lib", - "python" + get_python_version(), - "config")) - else: - # building python standard extensions - self.library_dirs.append('.') - - # For building extensions with a shared Python library, - # Python's library directory must be appended to library_dirs - # See Issues: #1600860, #4366 - if (sysconfig.get_config_var('Py_ENABLE_SHARED')): - if not sysconfig.python_build: - # building third party extensions - self.library_dirs.append(sysconfig.get_config_var('LIBDIR')) - else: - # building python standard extensions - self.library_dirs.append('.') - - # The argument parsing will result in self.define being a string, but - # it has to be a list of 
2-tuples. All the preprocessor symbols - # specified by the 'define' option will be set to '1'. Multiple - # symbols can be separated with commas. - - if self.define: - defines = self.define.split(',') - self.define = [(symbol, '1') for symbol in defines] - - # The option for macros to undefine is also a string from the - # option parsing, but has to be a list. Multiple symbols can also - # be separated with commas here. - if self.undef: - self.undef = self.undef.split(',') - - if self.swig_opts is None: - self.swig_opts = [] - else: - self.swig_opts = self.swig_opts.split(' ') - - # Finally add the user include and library directories if requested - if self.user: - user_include = os.path.join(USER_BASE, "include") - user_lib = os.path.join(USER_BASE, "lib") - if os.path.isdir(user_include): - self.include_dirs.append(user_include) - if os.path.isdir(user_lib): - self.library_dirs.append(user_lib) - self.rpath.append(user_lib) - - if isinstance(self.parallel, str): - try: - self.parallel = int(self.parallel) - except ValueError: - raise DistutilsOptionError("parallel should be an integer") - - def run(self): - from distutils.ccompiler import new_compiler - - # 'self.extensions', as supplied by setup.py, is a list of - # Extension instances. See the documentation for Extension (in - # distutils.extension) for details. - # - # For backwards compatibility with Distutils 0.8.2 and earlier, we - # also allow the 'extensions' list to be a list of tuples: - # (ext_name, build_info) - # where build_info is a dictionary containing everything that - # Extension instances do except the name, with a few things being - # differently named. We convert these 2-tuples to Extension - # instances as needed. - - if not self.extensions: - return - - # If we were asked to build any C/C++ libraries, make sure that the - # directory where we put them is in the library search path for - # linking extensions. - if self.distribution.has_c_libraries(): - build_clib = self.get_finalized_command('build_clib') - self.libraries.extend(build_clib.get_library_names() or []) - self.library_dirs.append(build_clib.build_clib) - - # Setup the CCompiler object that we'll use to do all the - # compiling and linking - self.compiler = new_compiler(compiler=self.compiler, - verbose=self.verbose, - dry_run=self.dry_run, - force=self.force) - customize_compiler(self.compiler) - # If we are cross-compiling, init the compiler now (if we are not - # cross-compiling, init would not hurt, but people may rely on - # late initialization of compiler even if they shouldn't...) - if os.name == 'nt' and self.plat_name != get_platform(): - self.compiler.initialize(self.plat_name) - - # And make sure that any compile/link-related options (which might - # come from the command-line or from the setup script) are set in - # that CCompiler object -- that way, they automatically apply to - # all compiling and linking done here. 
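A small, hypothetical illustration of the -D/-U handling described in finalize_options() above: the comma-separated strings coming from option parsing become the list forms that are later handed to the compiler. The symbol names here are invented.

define = "WITH_FOO,USE_BAR"          # as received from -D on the command line
undef = "NDEBUG"                     # as received from -U

define_list = [(symbol, '1') for symbol in define.split(',')]
undef_list = undef.split(',')

print(define_list)   # [('WITH_FOO', '1'), ('USE_BAR', '1')]
print(undef_list)    # ['NDEBUG']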
- if self.include_dirs is not None: - self.compiler.set_include_dirs(self.include_dirs) - if self.define is not None: - # 'define' option is a list of (name,value) tuples - for (name, value) in self.define: - self.compiler.define_macro(name, value) - if self.undef is not None: - for macro in self.undef: - self.compiler.undefine_macro(macro) - if self.libraries is not None: - self.compiler.set_libraries(self.libraries) - if self.library_dirs is not None: - self.compiler.set_library_dirs(self.library_dirs) - if self.rpath is not None: - self.compiler.set_runtime_library_dirs(self.rpath) - if self.link_objects is not None: - self.compiler.set_link_objects(self.link_objects) - - # Now actually compile and link everything. - self.build_extensions() - - def check_extensions_list(self, extensions): - """Ensure that the list of extensions (presumably provided as a - command option 'extensions') is valid, i.e. it is a list of - Extension objects. We also support the old-style list of 2-tuples, - where the tuples are (ext_name, build_info), which are converted to - Extension instances here. - - Raise DistutilsSetupError if the structure is invalid anywhere; - just returns otherwise. - """ - if not isinstance(extensions, list): - raise DistutilsSetupError( - "'ext_modules' option must be a list of Extension instances") - - for i, ext in enumerate(extensions): - if isinstance(ext, Extension): - continue # OK! (assume type-checking done - # by Extension constructor) - - if not isinstance(ext, tuple) or len(ext) != 2: - raise DistutilsSetupError( - "each element of 'ext_modules' option must be an " - "Extension instance or 2-tuple") - - ext_name, build_info = ext - - log.warn("old-style (ext_name, build_info) tuple found in " - "ext_modules for extension '%s' " - "-- please convert to Extension instance", ext_name) - - if not (isinstance(ext_name, str) and - extension_name_re.match(ext_name)): - raise DistutilsSetupError( - "first element of each tuple in 'ext_modules' " - "must be the extension name (a string)") - - if not isinstance(build_info, dict): - raise DistutilsSetupError( - "second element of each tuple in 'ext_modules' " - "must be a dictionary (build info)") - - # OK, the (ext_name, build_info) dict is type-safe: convert it - # to an Extension instance. - ext = Extension(ext_name, build_info['sources']) - - # Easy stuff: one-to-one mapping from dict elements to - # instance attributes. - for key in ('include_dirs', 'library_dirs', 'libraries', - 'extra_objects', 'extra_compile_args', - 'extra_link_args'): - val = build_info.get(key) - if val is not None: - setattr(ext, key, val) - - # Medium-easy stuff: same syntax/semantics, different names. - ext.runtime_library_dirs = build_info.get('rpath') - if 'def_file' in build_info: - log.warn("'def_file' element of build info dict " - "no longer supported") - - # Non-trivial stuff: 'macros' split into 'define_macros' - # and 'undef_macros'. - macros = build_info.get('macros') - if macros: - ext.define_macros = [] - ext.undef_macros = [] - for macro in macros: - if not (isinstance(macro, tuple) and len(macro) in (1, 2)): - raise DistutilsSetupError( - "'macros' element of build info dict " - "must be 1- or 2-tuple") - if len(macro) == 1: - ext.undef_macros.append(macro[0]) - elif len(macro) == 2: - ext.define_macros.append(macro) - - extensions[i] = ext - - def get_source_files(self): - self.check_extensions_list(self.extensions) - filenames = [] - - # Wouldn't it be neat if we knew the names of header files too... 
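A minimal sketch of the old-style tuple conversion that check_extensions_list() performs above, with types.SimpleNamespace standing in for an Extension instance and hypothetical module/file names; only the split of 'macros' into define_macros and undef_macros is reproduced here:

from types import SimpleNamespace

def convert_build_info(ext_name, build_info):
    # stand-in for the Extension(ext_name, build_info['sources']) created above
    ext = SimpleNamespace(name=ext_name, sources=build_info['sources'],
                          define_macros=[], undef_macros=[])
    for macro in build_info.get('macros', []):
        if len(macro) == 1:        # ('SYMBOL',)          -> undefine
            ext.undef_macros.append(macro[0])
        else:                      # ('SYMBOL', 'value')  -> define
            ext.define_macros.append(macro)
    return ext

ext = convert_build_info('pkg.spam', {'sources': ['spam.c'],
                                      'macros': [('WITH_FOO', '1'), ('NDEBUG',)]})
print(ext.define_macros, ext.undef_macros)   # [('WITH_FOO', '1')] ['NDEBUG']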
- for ext in self.extensions: - filenames.extend(ext.sources) - return filenames - - def get_outputs(self): - # Sanity check the 'extensions' list -- can't assume this is being - # done in the same run as a 'build_extensions()' call (in fact, we - # can probably assume that it *isn't*!). - self.check_extensions_list(self.extensions) - - # And build the list of output (built) filenames. Note that this - # ignores the 'inplace' flag, and assumes everything goes in the - # "build" tree. - outputs = [] - for ext in self.extensions: - outputs.append(self.get_ext_fullpath(ext.name)) - return outputs - - def build_extensions(self): - # First, sanity-check the 'extensions' list - self.check_extensions_list(self.extensions) - if self.parallel: - self._build_extensions_parallel() - else: - self._build_extensions_serial() - - def _build_extensions_parallel(self): - workers = self.parallel - if self.parallel is True: - workers = os.cpu_count() # may return None - try: - from concurrent.futures import ThreadPoolExecutor - except ImportError: - workers = None - - if workers is None: - self._build_extensions_serial() - return - - with ThreadPoolExecutor(max_workers=workers) as executor: - futures = [executor.submit(self.build_extension, ext) - for ext in self.extensions] - for ext, fut in zip(self.extensions, futures): - with self._filter_build_errors(ext): - fut.result() - - def _build_extensions_serial(self): - for ext in self.extensions: - with self._filter_build_errors(ext): - self.build_extension(ext) - - @contextlib.contextmanager - def _filter_build_errors(self, ext): - try: - yield - except (CCompilerError, DistutilsError, CompileError) as e: - if not ext.optional: - raise - self.warn('building extension "%s" failed: %s' % - (ext.name, e)) - - def build_extension(self, ext): - sources = ext.sources - if sources is None or not isinstance(sources, (list, tuple)): - raise DistutilsSetupError( - "in 'ext_modules' option (extension '%s'), " - "'sources' must be present and must be " - "a list of source filenames" % ext.name) - # sort to make the resulting .so file build reproducible - sources = sorted(sources) - - ext_path = self.get_ext_fullpath(ext.name) - depends = sources + ext.depends - if not (self.force or newer_group(depends, ext_path, 'newer')): - log.debug("skipping '%s' extension (up-to-date)", ext.name) - return - else: - log.info("building '%s' extension", ext.name) - - # First, scan the sources for SWIG definition files (.i), run - # SWIG on 'em to create .c files, and modify the sources list - # accordingly. - sources = self.swig_sources(sources, ext) - - # Next, compile the source code to object files. - - # XXX not honouring 'define_macros' or 'undef_macros' -- the - # CCompiler API needs to change to accommodate this, and I - # want to do one thing at a time! - - # Two possible sources for extra compiler arguments: - # - 'extra_compile_args' in Extension object - # - CFLAGS environment variable (not particularly - # elegant, but people seem to expect it and I - # guess it's useful) - # The environment variable should take precedence, and - # any sensible compiler will give precedence to later - # command line args. 
Hence we combine them in order: - extra_args = ext.extra_compile_args or [] - - macros = ext.define_macros[:] - for undef in ext.undef_macros: - macros.append((undef,)) - - objects = self.compiler.compile(sources, - output_dir=self.build_temp, - macros=macros, - include_dirs=ext.include_dirs, - debug=self.debug, - extra_postargs=extra_args, - depends=ext.depends) - - # XXX outdated variable, kept here in case third-part code - # needs it. - self._built_objects = objects[:] - - # Now link the object files together into a "shared object" -- - # of course, first we have to figure out all the other things - # that go into the mix. - if ext.extra_objects: - objects.extend(ext.extra_objects) - extra_args = ext.extra_link_args or [] - - # Detect target language, if not provided - language = ext.language or self.compiler.detect_language(sources) - - self.compiler.link_shared_object( - objects, ext_path, - libraries=self.get_libraries(ext), - library_dirs=ext.library_dirs, - runtime_library_dirs=ext.runtime_library_dirs, - extra_postargs=extra_args, - export_symbols=self.get_export_symbols(ext), - debug=self.debug, - build_temp=self.build_temp, - target_lang=language) - - def swig_sources(self, sources, extension): - """Walk the list of source files in 'sources', looking for SWIG - interface (.i) files. Run SWIG on all that are found, and - return a modified 'sources' list with SWIG source files replaced - by the generated C (or C++) files. - """ - new_sources = [] - swig_sources = [] - swig_targets = {} - - # XXX this drops generated C/C++ files into the source tree, which - # is fine for developers who want to distribute the generated - # source -- but there should be an option to put SWIG output in - # the temp dir. - - if self.swig_cpp: - log.warn("--swig-cpp is deprecated - use --swig-opts=-c++") - - if self.swig_cpp or ('-c++' in self.swig_opts) or \ - ('-c++' in extension.swig_opts): - target_ext = '.cpp' - else: - target_ext = '.c' - - for source in sources: - (base, ext) = os.path.splitext(source) - if ext == ".i": # SWIG interface file - new_sources.append(base + '_wrap' + target_ext) - swig_sources.append(source) - swig_targets[source] = new_sources[-1] - else: - new_sources.append(source) - - if not swig_sources: - return new_sources - - swig = self.swig or self.find_swig() - swig_cmd = [swig, "-python"] - swig_cmd.extend(self.swig_opts) - if self.swig_cpp: - swig_cmd.append("-c++") - - # Do not override commandline arguments - if not self.swig_opts: - for o in extension.swig_opts: - swig_cmd.append(o) - - for source in swig_sources: - target = swig_targets[source] - log.info("swigging %s to %s", source, target) - self.spawn(swig_cmd + ["-o", target, source]) - - return new_sources - - def find_swig(self): - """Return the name of the SWIG executable. On Unix, this is - just "swig" -- it should be in the PATH. Tries a bit harder on - Windows. - """ - if os.name == "posix": - return "swig" - elif os.name == "nt": - # Look for SWIG in its standard installation directory on - # Windows (or so I presume!). If we find it there, great; - # if not, act like Unix and assume it's in the PATH. 
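The .i-to-wrapper rewriting that swig_sources() describes above can be summarised in a short standalone sketch (illustrative only; file names are hypothetical):

import os

def rewrite_swig_sources(sources, cplusplus=False):
    target_ext = '.cpp' if cplusplus else '.c'
    new_sources, swig_targets = [], {}
    for source in sources:
        base, ext = os.path.splitext(source)
        if ext == '.i':                              # SWIG interface file
            wrapper = base + '_wrap' + target_ext    # what "swig -o <wrapper> <source>" generates
            new_sources.append(wrapper)
            swig_targets[source] = wrapper
        else:
            new_sources.append(source)
    return new_sources, swig_targets

print(rewrite_swig_sources(['spam.i', 'helpers.c']))
# (['spam_wrap.c', 'helpers.c'], {'spam.i': 'spam_wrap.c'})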
- for vers in ("1.3", "1.2", "1.1"): - fn = os.path.join("c:\\swig%s" % vers, "swig.exe") - if os.path.isfile(fn): - return fn - else: - return "swig.exe" - else: - raise DistutilsPlatformError( - "I don't know how to find (much less run) SWIG " - "on platform '%s'" % os.name) - - # -- Name generators ----------------------------------------------- - # (extension names, filenames, whatever) - def get_ext_fullpath(self, ext_name): - """Returns the path of the filename for a given extension. - - The file is located in `build_lib` or directly in the package - (inplace option). - """ - fullname = self.get_ext_fullname(ext_name) - modpath = fullname.split('.') - filename = self.get_ext_filename(modpath[-1]) - - if not self.inplace: - # no further work needed - # returning : - # build_dir/package/path/filename - filename = os.path.join(*modpath[:-1]+[filename]) - return os.path.join(self.build_lib, filename) - - # the inplace option requires to find the package directory - # using the build_py command for that - package = '.'.join(modpath[0:-1]) - build_py = self.get_finalized_command('build_py') - package_dir = os.path.abspath(build_py.get_package_dir(package)) - - # returning - # package_dir/filename - return os.path.join(package_dir, filename) - - def get_ext_fullname(self, ext_name): - """Returns the fullname of a given extension name. - - Adds the `package.` prefix""" - if self.package is None: - return ext_name - else: - return self.package + '.' + ext_name - - def get_ext_filename(self, ext_name): - r"""Convert the name of an extension (eg. "foo.bar") into the name - of the file from which it will be loaded (eg. "foo/bar.so", or - "foo\bar.pyd"). - """ - from distutils.sysconfig import get_config_var - ext_path = ext_name.split('.') - ext_suffix = get_config_var('EXT_SUFFIX') - return os.path.join(*ext_path) + ext_suffix - - def get_export_symbols(self, ext): - """Return the list of symbols that a shared extension has to - export. This either uses 'ext.export_symbols' or, if it's not - provided, "PyInit_" + module_name. Only relevant on Windows, where - the .pyd file (DLL) must export the module "PyInit_" function. - """ - suffix = '_' + ext.name.split('.')[-1] - try: - # Unicode module name support as defined in PEP-489 - # https://peps.python.org/pep-0489/#export-hook-name - suffix.encode('ascii') - except UnicodeEncodeError: - suffix = 'U' + suffix.encode('punycode').replace(b'-', b'_').decode('ascii') - - initfunc_name = "PyInit" + suffix - if initfunc_name not in ext.export_symbols: - ext.export_symbols.append(initfunc_name) - return ext.export_symbols - - def get_libraries(self, ext): - """Return the list of libraries to link against when building a - shared extension. On most platforms, this is just 'ext.libraries'; - on Windows, we add the Python library (eg. python20.dll). - """ - # The python library is always needed on Windows. For MSVC, this - # is redundant, since the library is mentioned in a pragma in - # pyconfig.h that MSVC groks. The other Windows compilers all seem - # to need it mentioned explicitly, though, so that's what we do. - # Append '_d' to the python import library on debug builds. 
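The PEP 489 export-hook naming used by get_export_symbols() above, restated as a self-contained sketch (module names are hypothetical; the punycode fallback mirrors the code in the diff):

def init_symbol(module_name):
    suffix = '_' + module_name.rsplit('.', 1)[-1]
    try:
        suffix.encode('ascii')                    # plain ASCII name: PyInit_<name>
    except UnicodeEncodeError:                    # non-ASCII name: PyInitU_ + punycode
        suffix = 'U' + suffix.encode('punycode').replace(b'-', b'_').decode('ascii')
    return 'PyInit' + suffix

print(init_symbol('pkg.spam'))        # PyInit_spam
print(init_symbol('pkg.història'))    # PyInitU_... (punycode-encoded)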
- if sys.platform == "win32": - from distutils._msvccompiler import MSVCCompiler - if not isinstance(self.compiler, MSVCCompiler): - template = "python%d%d" - if self.debug: - template = template + '_d' - pythonlib = (template % - (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) - # don't extend ext.libraries, it may be shared with other - # extensions, it is a reference to the original list - return ext.libraries + [pythonlib] - else: - # On Android only the main executable and LD_PRELOADs are considered - # to be RTLD_GLOBAL, all the dependencies of the main executable - # remain RTLD_LOCAL and so the shared libraries must be linked with - # libpython when python is built with a shared python library (issue - # bpo-21536). - # On Cygwin (and if required, other POSIX-like platforms based on - # Windows like MinGW) it is simply necessary that all symbols in - # shared libraries are resolved at link time. - from distutils.sysconfig import get_config_var - link_libpython = False - if get_config_var('Py_ENABLE_SHARED'): - # A native build on an Android device or on Cygwin - if hasattr(sys, 'getandroidapilevel'): - link_libpython = True - elif sys.platform == 'cygwin': - link_libpython = True - elif '_PYTHON_HOST_PLATFORM' in os.environ: - # We are cross-compiling for one of the relevant platforms - if get_config_var('ANDROID_API_LEVEL') != 0: - link_libpython = True - elif get_config_var('MACHDEP') == 'cygwin': - link_libpython = True - - if link_libpython: - ldversion = get_config_var('LDVERSION') - return ext.libraries + ['python' + ldversion] - - return ext.libraries diff --git a/Lib/distutils/command/build_py.py b/Lib/distutils/command/build_py.py deleted file mode 100644 index edc2171cd122dd..00000000000000 --- a/Lib/distutils/command/build_py.py +++ /dev/null @@ -1,416 +0,0 @@ -"""distutils.command.build_py - -Implements the Distutils 'build_py' command.""" - -import os -import importlib.util -import sys -import glob - -from distutils.core import Command -from distutils.errors import * -from distutils.util import convert_path, Mixin2to3 -from distutils import log - -class build_py (Command): - - description = "\"build\" pure Python modules (copy to build directory)" - - user_options = [ - ('build-lib=', 'd', "directory to \"build\" (copy) to"), - ('compile', 'c', "compile .py to .pyc"), - ('no-compile', None, "don't compile .py files [default]"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), - ('force', 'f', "forcibly build everything (ignore file timestamps)"), - ] - - boolean_options = ['compile', 'force'] - negative_opt = {'no-compile' : 'compile'} - - def initialize_options(self): - self.build_lib = None - self.py_modules = None - self.package = None - self.package_data = None - self.package_dir = None - self.compile = 0 - self.optimize = 0 - self.force = None - - def finalize_options(self): - self.set_undefined_options('build', - ('build_lib', 'build_lib'), - ('force', 'force')) - - # Get the distribution options that are aliases for build_py - # options -- list of packages and list of modules. 
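How the Windows import-library name built in get_libraries() above is derived from sys.hexversion, as a runnable sketch (the helper name is invented):

import sys

def windows_pythonlib(debug=False):
    # sys.hexversion keeps major.minor in its top two bytes, e.g. 0x030c00f0 -> (3, 12)
    major = sys.hexversion >> 24
    minor = (sys.hexversion >> 16) & 0xff
    return 'python%d%d' % (major, minor) + ('_d' if debug else '')

print(windows_pythonlib())        # e.g. python312
print(windows_pythonlib(True))    # e.g. python312_d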
- self.packages = self.distribution.packages - self.py_modules = self.distribution.py_modules - self.package_data = self.distribution.package_data - self.package_dir = {} - if self.distribution.package_dir: - for name, path in self.distribution.package_dir.items(): - self.package_dir[name] = convert_path(path) - self.data_files = self.get_data_files() - - # Ick, copied straight from install_lib.py (fancy_getopt needs a - # type system! Hell, *everything* needs a type system!!!) - if not isinstance(self.optimize, int): - try: - self.optimize = int(self.optimize) - assert 0 <= self.optimize <= 2 - except (ValueError, AssertionError): - raise DistutilsOptionError("optimize must be 0, 1, or 2") - - def run(self): - # XXX copy_file by default preserves atime and mtime. IMHO this is - # the right thing to do, but perhaps it should be an option -- in - # particular, a site administrator might want installed files to - # reflect the time of installation rather than the last - # modification time before the installed release. - - # XXX copy_file by default preserves mode, which appears to be the - # wrong thing to do: if a file is read-only in the working - # directory, we want it to be installed read/write so that the next - # installation of the same module distribution can overwrite it - # without problems. (This might be a Unix-specific issue.) Thus - # we turn off 'preserve_mode' when copying to the build directory, - # since the build directory is supposed to be exactly what the - # installation will look like (ie. we preserve mode when - # installing). - - # Two options control which modules will be installed: 'packages' - # and 'py_modules'. The former lets us work with whole packages, not - # specifying individual modules at all; the latter is for - # specifying modules one-at-a-time. 
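To make the 'packages' versus 'py_modules' distinction above concrete, here is a hypothetical set of distribution options as a setup script would supply them (package names and globs are invented; build_py reads them from self.distribution):

options = {
    # whole packages: every .py below the package directory is copied
    'packages': ['spam', 'spam.gui'],
    # individual top-level modules, selected one at a time
    'py_modules': ['standalone_helper'],
    # extra non-code files copied by build_package_data(), keyed by package
    'package_data': {'spam': ['data/*.json']},
}
for key, value in options.items():
    print(key, '->', value)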
- - if self.py_modules: - self.build_modules() - if self.packages: - self.build_packages() - self.build_package_data() - - self.byte_compile(self.get_outputs(include_bytecode=0)) - - def get_data_files(self): - """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" - data = [] - if not self.packages: - return data - for package in self.packages: - # Locate package source directory - src_dir = self.get_package_dir(package) - - # Compute package build directory - build_dir = os.path.join(*([self.build_lib] + package.split('.'))) - - # Length of path to strip from found files - plen = 0 - if src_dir: - plen = len(src_dir)+1 - - # Strip directory from globbed filenames - filenames = [ - file[plen:] for file in self.find_data_files(package, src_dir) - ] - data.append((package, src_dir, build_dir, filenames)) - return data - - def find_data_files(self, package, src_dir): - """Return filenames for package's data files in 'src_dir'""" - globs = (self.package_data.get('', []) - + self.package_data.get(package, [])) - files = [] - for pattern in globs: - # Each pattern has to be converted to a platform-specific path - filelist = glob.glob(os.path.join(glob.escape(src_dir), convert_path(pattern))) - # Files that match more than one pattern are only added once - files.extend([fn for fn in filelist if fn not in files - and os.path.isfile(fn)]) - return files - - def build_package_data(self): - """Copy data files into build directory""" - lastdir = None - for package, src_dir, build_dir, filenames in self.data_files: - for filename in filenames: - target = os.path.join(build_dir, filename) - self.mkpath(os.path.dirname(target)) - self.copy_file(os.path.join(src_dir, filename), target, - preserve_mode=False) - - def get_package_dir(self, package): - """Return the directory, relative to the top of the source - distribution, where package 'package' should be found - (at least according to the 'package_dir' option, if any).""" - path = package.split('.') - - if not self.package_dir: - if path: - return os.path.join(*path) - else: - return '' - else: - tail = [] - while path: - try: - pdir = self.package_dir['.'.join(path)] - except KeyError: - tail.insert(0, path[-1]) - del path[-1] - else: - tail.insert(0, pdir) - return os.path.join(*tail) - else: - # Oops, got all the way through 'path' without finding a - # match in package_dir. If package_dir defines a directory - # for the root (nameless) package, then fallback on it; - # otherwise, we might as well have not consulted - # package_dir at all, as we just use the directory implied - # by 'tail' (which should be the same as the original value - # of 'path' at this point). - pdir = self.package_dir.get('') - if pdir is not None: - tail.insert(0, pdir) - - if tail: - return os.path.join(*tail) - else: - return '' - - def check_package(self, package, package_dir): - # Empty dir name means current directory, which we can probably - # assume exists. Also, os.path.exists and isdir don't know about - # my "empty string means current dir" convention, so we have to - # circumvent them. 
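A standalone sketch of the get_package_dir() lookup shown above: walk the dotted name from the most specific package_dir entry to the least, falling back to the path implied by the name itself (the mapping and package names are hypothetical):

import os

def package_dir_for(package, package_dir):
    path = package.split('.') if package else []
    tail = []
    while path:
        entry = package_dir.get('.'.join(path))
        if entry is not None:
            tail.insert(0, entry)          # found the most specific mapping
            return os.path.join(*tail)
        tail.insert(0, path.pop())         # no mapping yet: keep the trailing name
    root = package_dir.get('')             # optional mapping for the root package
    if root is not None:
        tail.insert(0, root)
    return os.path.join(*tail) if tail else ''

print(package_dir_for('spam.gui', {'spam': 'src/spam'}))   # src/spam/gui on POSIX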
- if package_dir != "": - if not os.path.exists(package_dir): - raise DistutilsFileError( - "package directory '%s' does not exist" % package_dir) - if not os.path.isdir(package_dir): - raise DistutilsFileError( - "supposed package directory '%s' exists, " - "but is not a directory" % package_dir) - - # Require __init__.py for all but the "root package" - if package: - init_py = os.path.join(package_dir, "__init__.py") - if os.path.isfile(init_py): - return init_py - else: - log.warn(("package init file '%s' not found " + - "(or not a regular file)"), init_py) - - # Either not in a package at all (__init__.py not expected), or - # __init__.py doesn't exist -- so don't return the filename. - return None - - def check_module(self, module, module_file): - if not os.path.isfile(module_file): - log.warn("file %s (for module %s) not found", module_file, module) - return False - else: - return True - - def find_package_modules(self, package, package_dir): - self.check_package(package, package_dir) - module_files = glob.glob(os.path.join(glob.escape(package_dir), "*.py")) - modules = [] - setup_script = os.path.abspath(self.distribution.script_name) - - for f in module_files: - abs_f = os.path.abspath(f) - if abs_f != setup_script: - module = os.path.splitext(os.path.basename(f))[0] - modules.append((package, module, f)) - else: - self.debug_print("excluding %s" % setup_script) - return modules - - def find_modules(self): - """Finds individually-specified Python modules, ie. those listed by - module name in 'self.py_modules'. Returns a list of tuples (package, - module_base, filename): 'package' is a tuple of the path through - package-space to the module; 'module_base' is the bare (no - packages, no dots) module name, and 'filename' is the path to the - ".py" file (relative to the distribution root) that implements the - module. - """ - # Map package names to tuples of useful info about the package: - # (package_dir, checked) - # package_dir - the directory where we'll find source files for - # this package - # checked - true if we have checked that the package directory - # is valid (exists, contains __init__.py, ... ?) - packages = {} - - # List of (package, module, filename) tuples to return - modules = [] - - # We treat modules-in-packages almost the same as toplevel modules, - # just the "package" for a toplevel is empty (either an empty - # string or empty list, depending on context). Differences: - # - don't check for __init__.py in directory for empty package - for module in self.py_modules: - path = module.split('.') - package = '.'.join(path[0:-1]) - module_base = path[-1] - - try: - (package_dir, checked) = packages[package] - except KeyError: - package_dir = self.get_package_dir(package) - checked = 0 - - if not checked: - init_py = self.check_package(package, package_dir) - packages[package] = (package_dir, 1) - if init_py: - modules.append((package, "__init__", init_py)) - - # XXX perhaps we should also check for just .pyc files - # (so greedy closed-source bastards can distribute Python - # modules too) - module_file = os.path.join(package_dir, module_base + ".py") - if not self.check_module(module, module_file): - continue - - modules.append((package, module_base, module_file)) - - return modules - - def find_all_modules(self): - """Compute the list of all modules that will be built, whether - they are specified one-module-at-a-time ('self.py_modules') or - by whole packages ('self.packages'). 
Return a list of tuples - (package, module, module_file), just like 'find_modules()' and - 'find_package_modules()' do.""" - modules = [] - if self.py_modules: - modules.extend(self.find_modules()) - if self.packages: - for package in self.packages: - package_dir = self.get_package_dir(package) - m = self.find_package_modules(package, package_dir) - modules.extend(m) - return modules - - def get_source_files(self): - return [module[-1] for module in self.find_all_modules()] - - def get_module_outfile(self, build_dir, package, module): - outfile_path = [build_dir] + list(package) + [module + ".py"] - return os.path.join(*outfile_path) - - def get_outputs(self, include_bytecode=1): - modules = self.find_all_modules() - outputs = [] - for (package, module, module_file) in modules: - package = package.split('.') - filename = self.get_module_outfile(self.build_lib, package, module) - outputs.append(filename) - if include_bytecode: - if self.compile: - outputs.append(importlib.util.cache_from_source( - filename, optimization='')) - if self.optimize > 0: - outputs.append(importlib.util.cache_from_source( - filename, optimization=self.optimize)) - - outputs += [ - os.path.join(build_dir, filename) - for package, src_dir, build_dir, filenames in self.data_files - for filename in filenames - ] - - return outputs - - def build_module(self, module, module_file, package): - if isinstance(package, str): - package = package.split('.') - elif not isinstance(package, (list, tuple)): - raise TypeError( - "'package' must be a string (dot-separated), list, or tuple") - - # Now put the module source file into the "build" area -- this is - # easy, we just copy it somewhere under self.build_lib (the build - # directory for Python source). - outfile = self.get_module_outfile(self.build_lib, package, module) - dir = os.path.dirname(outfile) - self.mkpath(dir) - return self.copy_file(module_file, outfile, preserve_mode=0) - - def build_modules(self): - modules = self.find_modules() - for (package, module, module_file) in modules: - # Now "build" the module -- ie. copy the source file to - # self.build_lib (the build directory for Python source). - # (Actually, it gets copied to the directory for this package - # under self.build_lib.) - self.build_module(module, module_file, package) - - def build_packages(self): - for package in self.packages: - # Get list of (package, module, module_file) tuples based on - # scanning the package directory. 'package' is only included - # in the tuple so that 'find_modules()' and - # 'find_package_tuples()' have a consistent interface; it's - # ignored here (apart from a sanity check). Also, 'module' is - # the *unqualified* module name (ie. no dots, no package -- we - # already know its package!), and 'module_file' is the path to - # the .py file, relative to the current directory - # (ie. including 'package_dir'). - package_dir = self.get_package_dir(package) - modules = self.find_package_modules(package, package_dir) - - # Now loop over the modules we found, "building" each one (just - # copy it to self.build_lib). 
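The output names computed by get_outputs() above, including the bytecode cache files, can be previewed with the real importlib.util.cache_from_source() API (the build paths below are hypothetical):

import importlib.util
import os

build_lib = os.path.join('build', 'lib')
source = os.path.join(build_lib, 'spam', 'gui', '__init__.py')

print(source)                                                      # the copied .py file
print(importlib.util.cache_from_source(source, optimization=''))   # .pyc written when --compile is set
print(importlib.util.cache_from_source(source, optimization=2))    # .opt-2.pyc written for optimize=2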
- for (package_, module, module_file) in modules: - assert package == package_ - self.build_module(module, module_file, package) - - def byte_compile(self, files): - if sys.dont_write_bytecode: - self.warn('byte-compiling is disabled, skipping.') - return - - from distutils.util import byte_compile - prefix = self.build_lib - if prefix[-1] != os.sep: - prefix = prefix + os.sep - - # XXX this code is essentially the same as the 'byte_compile() - # method of the "install_lib" command, except for the determination - # of the 'prefix' string. Hmmm. - if self.compile: - byte_compile(files, optimize=0, - force=self.force, prefix=prefix, dry_run=self.dry_run) - if self.optimize > 0: - byte_compile(files, optimize=self.optimize, - force=self.force, prefix=prefix, dry_run=self.dry_run) - -class build_py_2to3(build_py, Mixin2to3): - def run(self): - self.updated_files = [] - - # Base class code - if self.py_modules: - self.build_modules() - if self.packages: - self.build_packages() - self.build_package_data() - - # 2to3 - self.run_2to3(self.updated_files) - - # Remaining base class code - self.byte_compile(self.get_outputs(include_bytecode=0)) - - def build_module(self, module, module_file, package): - res = build_py.build_module(self, module, module_file, package) - if res[1]: - # file was copied - self.updated_files.append(res[0]) - return res diff --git a/Lib/distutils/command/build_scripts.py b/Lib/distutils/command/build_scripts.py deleted file mode 100644 index ccc70e6465016e..00000000000000 --- a/Lib/distutils/command/build_scripts.py +++ /dev/null @@ -1,160 +0,0 @@ -"""distutils.command.build_scripts - -Implements the Distutils 'build_scripts' command.""" - -import os, re -from stat import ST_MODE -from distutils import sysconfig -from distutils.core import Command -from distutils.dep_util import newer -from distutils.util import convert_path, Mixin2to3 -from distutils import log -import tokenize - -# check if Python is called on the first line with this expression -first_line_re = re.compile(b'^#!.*python[0-9.]*([ \t].*)?$') - -class build_scripts(Command): - - description = "\"build\" scripts (copy and fixup #! line)" - - user_options = [ - ('build-dir=', 'd', "directory to \"build\" (copy) to"), - ('force', 'f', "forcibly build everything (ignore file timestamps"), - ('executable=', 'e', "specify final destination interpreter path"), - ] - - boolean_options = ['force'] - - - def initialize_options(self): - self.build_dir = None - self.scripts = None - self.force = None - self.executable = None - self.outfiles = None - - def finalize_options(self): - self.set_undefined_options('build', - ('build_scripts', 'build_dir'), - ('force', 'force'), - ('executable', 'executable')) - self.scripts = self.distribution.scripts - - def get_source_files(self): - return self.scripts - - def run(self): - if not self.scripts: - return - self.copy_scripts() - - - def copy_scripts(self): - r"""Copy each script listed in 'self.scripts'; if it's marked as a - Python script in the Unix way (first line matches 'first_line_re', - ie. starts with "\#!" and contains "python"), then adjust the first - line to refer to the current Python interpreter as we copy. 
- """ - self.mkpath(self.build_dir) - outfiles = [] - updated_files = [] - for script in self.scripts: - adjust = False - script = convert_path(script) - outfile = os.path.join(self.build_dir, os.path.basename(script)) - outfiles.append(outfile) - - if not self.force and not newer(script, outfile): - log.debug("not copying %s (up-to-date)", script) - continue - - # Always open the file, but ignore failures in dry-run mode -- - # that way, we'll get accurate feedback if we can read the - # script. - try: - f = open(script, "rb") - except OSError: - if not self.dry_run: - raise - f = None - else: - encoding, lines = tokenize.detect_encoding(f.readline) - f.seek(0) - first_line = f.readline() - if not first_line: - self.warn("%s is an empty file (skipping)" % script) - continue - - match = first_line_re.match(first_line) - if match: - adjust = True - post_interp = match.group(1) or b'' - - if adjust: - log.info("copying and adjusting %s -> %s", script, - self.build_dir) - updated_files.append(outfile) - if not self.dry_run: - if not sysconfig.python_build: - executable = self.executable - else: - executable = os.path.join( - sysconfig.get_config_var("BINDIR"), - "python%s%s" % (sysconfig.get_config_var("VERSION"), - sysconfig.get_config_var("EXE"))) - executable = os.fsencode(executable) - shebang = b"#!" + executable + post_interp + b"\n" - # Python parser starts to read a script using UTF-8 until - # it gets a #coding:xxx cookie. The shebang has to be the - # first line of a file, the #coding:xxx cookie cannot be - # written before. So the shebang has to be decodable from - # UTF-8. - try: - shebang.decode('utf-8') - except UnicodeDecodeError: - raise ValueError( - "The shebang ({!r}) is not decodable " - "from utf-8".format(shebang)) - # If the script is encoded to a custom encoding (use a - # #coding:xxx cookie), the shebang has to be decodable from - # the script encoding too. - try: - shebang.decode(encoding) - except UnicodeDecodeError: - raise ValueError( - "The shebang ({!r}) is not decodable " - "from the script encoding ({})" - .format(shebang, encoding)) - with open(outfile, "wb") as outf: - outf.write(shebang) - outf.writelines(f.readlines()) - if f: - f.close() - else: - if f: - f.close() - updated_files.append(outfile) - self.copy_file(script, outfile) - - if os.name == 'posix': - for file in outfiles: - if self.dry_run: - log.info("changing mode of %s", file) - else: - oldmode = os.stat(file)[ST_MODE] & 0o7777 - newmode = (oldmode | 0o555) & 0o7777 - if newmode != oldmode: - log.info("changing mode of %s from %o to %o", - file, oldmode, newmode) - os.chmod(file, newmode) - # XXX should we modify self.outfiles? - return outfiles, updated_files - -class build_scripts_2to3(build_scripts, Mixin2to3): - - def copy_scripts(self): - outfiles, updated_files = build_scripts.copy_scripts(self) - if not self.dry_run: - self.run_2to3(updated_files) - return outfiles, updated_files diff --git a/Lib/distutils/command/check.py b/Lib/distutils/command/check.py deleted file mode 100644 index 73a30f3afd84a3..00000000000000 --- a/Lib/distutils/command/check.py +++ /dev/null @@ -1,148 +0,0 @@ -"""distutils.command.check - -Implements the Distutils 'check' command. 
-""" -from distutils.core import Command -from distutils.errors import DistutilsSetupError - -try: - # docutils is installed - from docutils.utils import Reporter - from docutils.parsers.rst import Parser - from docutils import frontend - from docutils import nodes - - class SilentReporter(Reporter): - - def __init__(self, source, report_level, halt_level, stream=None, - debug=0, encoding='ascii', error_handler='replace'): - self.messages = [] - Reporter.__init__(self, source, report_level, halt_level, stream, - debug, encoding, error_handler) - - def system_message(self, level, message, *children, **kwargs): - self.messages.append((level, message, children, kwargs)) - return nodes.system_message(message, level=level, - type=self.levels[level], - *children, **kwargs) - - HAS_DOCUTILS = True -except Exception: - # Catch all exceptions because exceptions besides ImportError probably - # indicate that docutils is not ported to Py3k. - HAS_DOCUTILS = False - -class check(Command): - """This command checks the meta-data of the package. - """ - description = ("perform some checks on the package") - user_options = [('metadata', 'm', 'Verify meta-data'), - ('restructuredtext', 'r', - ('Checks if long string meta-data syntax ' - 'are reStructuredText-compliant')), - ('strict', 's', - 'Will exit with an error if a check fails')] - - boolean_options = ['metadata', 'restructuredtext', 'strict'] - - def initialize_options(self): - """Sets default values for options.""" - self.restructuredtext = 0 - self.metadata = 1 - self.strict = 0 - self._warnings = 0 - - def finalize_options(self): - pass - - def warn(self, msg): - """Counts the number of warnings that occurs.""" - self._warnings += 1 - return Command.warn(self, msg) - - def run(self): - """Runs the command.""" - # perform the various tests - if self.metadata: - self.check_metadata() - if self.restructuredtext: - if HAS_DOCUTILS: - self.check_restructuredtext() - elif self.strict: - raise DistutilsSetupError('The docutils package is needed.') - - # let's raise an error in strict mode, if we have at least - # one warning - if self.strict and self._warnings > 0: - raise DistutilsSetupError('Please correct your package.') - - def check_metadata(self): - """Ensures that all required elements of meta-data are supplied. - - Required fields: - name, version, URL - - Recommended fields: - (author and author_email) or (maintainer and maintainer_email) - - Warns if any are missing. 
- """ - metadata = self.distribution.metadata - - missing = [] - for attr in ('name', 'version', 'url'): - if not (hasattr(metadata, attr) and getattr(metadata, attr)): - missing.append(attr) - - if missing: - self.warn("missing required meta-data: %s" % ', '.join(missing)) - if metadata.author: - if not metadata.author_email: - self.warn("missing meta-data: if 'author' supplied, " + - "'author_email' should be supplied too") - elif metadata.maintainer: - if not metadata.maintainer_email: - self.warn("missing meta-data: if 'maintainer' supplied, " + - "'maintainer_email' should be supplied too") - else: - self.warn("missing meta-data: either (author and author_email) " + - "or (maintainer and maintainer_email) " + - "should be supplied") - - def check_restructuredtext(self): - """Checks if the long string fields are reST-compliant.""" - data = self.distribution.get_long_description() - for warning in self._check_rst_data(data): - line = warning[-1].get('line') - if line is None: - warning = warning[1] - else: - warning = '%s (line %s)' % (warning[1], line) - self.warn(warning) - - def _check_rst_data(self, data): - """Returns warnings when the provided data doesn't compile.""" - # the include and csv_table directives need this to be a path - source_path = self.distribution.script_name or 'setup.py' - parser = Parser() - settings = frontend.OptionParser(components=(Parser,)).get_default_values() - settings.tab_width = 4 - settings.pep_references = None - settings.rfc_references = None - reporter = SilentReporter(source_path, - settings.report_level, - settings.halt_level, - stream=settings.warning_stream, - debug=settings.debug, - encoding=settings.error_encoding, - error_handler=settings.error_encoding_error_handler) - - document = nodes.document(settings, reporter, source=source_path) - document.note_source(source_path, -1) - try: - parser.parse(data, document) - except AttributeError as e: - reporter.messages.append( - (-1, 'Could not finish the parsing: %s.' 
% e, '', {})) - - return reporter.messages diff --git a/Lib/distutils/command/clean.py b/Lib/distutils/command/clean.py deleted file mode 100644 index 0cb270166211fe..00000000000000 --- a/Lib/distutils/command/clean.py +++ /dev/null @@ -1,76 +0,0 @@ -"""distutils.command.clean - -Implements the Distutils 'clean' command.""" - -# contributed by Bastian Kleineidam <calvin@cs.uni-sb.de>, added 2000-03-18 - -import os -from distutils.core import Command -from distutils.dir_util import remove_tree -from distutils import log - -class clean(Command): - - description = "clean up temporary files from 'build' command" - user_options = [ - ('build-base=', 'b', - "base build directory (default: 'build.build-base')"), - ('build-lib=', None, - "build directory for all modules (default: 'build.build-lib')"), - ('build-temp=', 't', - "temporary build directory (default: 'build.build-temp')"), - ('build-scripts=', None, - "build directory for scripts (default: 'build.build-scripts')"), - ('bdist-base=', None, - "temporary directory for built distributions"), - ('all', 'a', - "remove all build output, not just temporary by-products") - ] - - boolean_options = ['all'] - - def initialize_options(self): - self.build_base = None - self.build_lib = None - self.build_temp = None - self.build_scripts = None - self.bdist_base = None - self.all = None - - def finalize_options(self): - self.set_undefined_options('build', - ('build_base', 'build_base'), - ('build_lib', 'build_lib'), - ('build_scripts', 'build_scripts'), - ('build_temp', 'build_temp')) - self.set_undefined_options('bdist', - ('bdist_base', 'bdist_base')) - - def run(self): - # remove the build/temp.<plat> directory (unless it's already - # gone) - if os.path.exists(self.build_temp): - remove_tree(self.build_temp, dry_run=self.dry_run) - else: - log.debug("'%s' does not exist -- can't clean it", - self.build_temp) - - if self.all: - # remove build directories - for directory in (self.build_lib, - self.bdist_base, - self.build_scripts): - if os.path.exists(directory): - remove_tree(directory, dry_run=self.dry_run) - else: - log.warn("'%s' does not exist -- can't clean it", - directory) - - # just for the heck of it, try to remove the base build directory: - # we might have emptied it right now, but if not we don't care - if not self.dry_run: - try: - os.rmdir(self.build_base) - log.info("removing '%s'", self.build_base) - except OSError: - pass diff --git a/Lib/distutils/command/command_template b/Lib/distutils/command/command_template deleted file mode 100644 index 6106819db843b5..00000000000000 --- a/Lib/distutils/command/command_template +++ /dev/null @@ -1,33 +0,0 @@ -"""distutils.command.x - -Implements the Distutils 'x' command. -""" - -# created 2000/mm/dd, John Doe - -__revision__ = "$Id$" - -from distutils.core import Command - - -class x(Command): - - # Brief (40-50 characters) description of the command - description = "" - - # List of option tuples: long name, short name (None if no short - # name), and help string. - user_options = [('', '', - ""), - ] - - def initialize_options(self): - self. = None - self. = None - self. 
= None - - def finalize_options(self): - if self.x is None: - self.x = - - def run(self): diff --git a/Lib/distutils/command/config.py b/Lib/distutils/command/config.py deleted file mode 100644 index aeda408e731979..00000000000000 --- a/Lib/distutils/command/config.py +++ /dev/null @@ -1,344 +0,0 @@ -"""distutils.command.config - -Implements the Distutils 'config' command, a (mostly) empty command class -that exists mainly to be sub-classed by specific module distributions and -applications. The idea is that while every "config" command is different, -at least they're all named the same, and users always see "config" in the -list of standard commands. Also, this is a good place to put common -configure-like tasks: "try to compile this C code", or "figure out where -this header file lives". -""" - -import os, re - -from distutils.core import Command -from distutils.errors import DistutilsExecError -from distutils.sysconfig import customize_compiler -from distutils import log - -LANG_EXT = {"c": ".c", "c++": ".cxx"} - -class config(Command): - - description = "prepare to build" - - user_options = [ - ('compiler=', None, - "specify the compiler type"), - ('cc=', None, - "specify the compiler executable"), - ('include-dirs=', 'I', - "list of directories to search for header files"), - ('define=', 'D', - "C preprocessor macros to define"), - ('undef=', 'U', - "C preprocessor macros to undefine"), - ('libraries=', 'l', - "external C libraries to link with"), - ('library-dirs=', 'L', - "directories to search for external C libraries"), - - ('noisy', None, - "show every action (compile, link, run, ...) taken"), - ('dump-source', None, - "dump generated source files before attempting to compile them"), - ] - - - # The three standard command methods: since the "config" command - # does nothing by default, these are empty. - - def initialize_options(self): - self.compiler = None - self.cc = None - self.include_dirs = None - self.libraries = None - self.library_dirs = None - - # maximal output for now - self.noisy = 1 - self.dump_source = 1 - - # list of temporary files generated along-the-way that we have - # to clean at some point - self.temp_files = [] - - def finalize_options(self): - if self.include_dirs is None: - self.include_dirs = self.distribution.include_dirs or [] - elif isinstance(self.include_dirs, str): - self.include_dirs = self.include_dirs.split(os.pathsep) - - if self.libraries is None: - self.libraries = [] - elif isinstance(self.libraries, str): - self.libraries = [self.libraries] - - if self.library_dirs is None: - self.library_dirs = [] - elif isinstance(self.library_dirs, str): - self.library_dirs = self.library_dirs.split(os.pathsep) - - def run(self): - pass - - # Utility methods for actual "config" commands. The interfaces are - # loosely based on Autoconf macros of similar names. Sub-classes - # may use these freely. - - def _check_compiler(self): - """Check that 'self.compiler' really is a CCompiler object; - if not, make it one. - """ - # We do this late, and only on-demand, because this is an expensive - # import. 
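A filled-in version of the command template above, a sketch only: the option name is invented, and setuptools.Command is used as the base class since it keeps the same initialize_options/finalize_options/run interface once distutils is removed:

from setuptools import Command

class hello(Command):
    # brief description shown by "setup.py --help-commands"
    description = "print a greeting (demo command)"

    # long name, short name (None if no short name), help string
    user_options = [('name=', 'n', "who to greet")]

    def initialize_options(self):
        self.name = None

    def finalize_options(self):
        if self.name is None:
            self.name = 'world'

    def run(self):
        print('hello, %s' % self.name)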
- from distutils.ccompiler import CCompiler, new_compiler - if not isinstance(self.compiler, CCompiler): - self.compiler = new_compiler(compiler=self.compiler, - dry_run=self.dry_run, force=1) - customize_compiler(self.compiler) - if self.include_dirs: - self.compiler.set_include_dirs(self.include_dirs) - if self.libraries: - self.compiler.set_libraries(self.libraries) - if self.library_dirs: - self.compiler.set_library_dirs(self.library_dirs) - - def _gen_temp_sourcefile(self, body, headers, lang): - filename = "_configtest" + LANG_EXT[lang] - with open(filename, "w") as file: - if headers: - for header in headers: - file.write("#include <%s>\n" % header) - file.write("\n") - file.write(body) - if body[-1] != "\n": - file.write("\n") - return filename - - def _preprocess(self, body, headers, include_dirs, lang): - src = self._gen_temp_sourcefile(body, headers, lang) - out = "_configtest.i" - self.temp_files.extend([src, out]) - self.compiler.preprocess(src, out, include_dirs=include_dirs) - return (src, out) - - def _compile(self, body, headers, include_dirs, lang): - src = self._gen_temp_sourcefile(body, headers, lang) - if self.dump_source: - dump_file(src, "compiling '%s':" % src) - (obj,) = self.compiler.object_filenames([src]) - self.temp_files.extend([src, obj]) - self.compiler.compile([src], include_dirs=include_dirs) - return (src, obj) - - def _link(self, body, headers, include_dirs, libraries, library_dirs, - lang): - (src, obj) = self._compile(body, headers, include_dirs, lang) - prog = os.path.splitext(os.path.basename(src))[0] - self.compiler.link_executable([obj], prog, - libraries=libraries, - library_dirs=library_dirs, - target_lang=lang) - - if self.compiler.exe_extension is not None: - prog = prog + self.compiler.exe_extension - self.temp_files.append(prog) - - return (src, obj, prog) - - def _clean(self, *filenames): - if not filenames: - filenames = self.temp_files - self.temp_files = [] - log.info("removing: %s", ' '.join(filenames)) - for filename in filenames: - try: - os.remove(filename) - except OSError: - pass - - - # XXX these ignore the dry-run flag: what to do, what to do? even if - # you want a dry-run build, you still need some sort of configuration - # info. My inclination is to make it up to the real config command to - # consult 'dry_run', and assume a default (minimal) configuration if - # true. The problem with trying to do it here is that you'd have to - # return either true or false from all the 'try' methods, neither of - # which is correct. - - # XXX need access to the header search path and maybe default macros. - - def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"): - """Construct a source file from 'body' (a string containing lines - of C/C++ code) and 'headers' (a list of header files to include) - and run it through the preprocessor. Return true if the - preprocessor succeeded, false if there were any errors. - ('body' probably isn't of much use, but what the heck.) - """ - from distutils.ccompiler import CompileError - self._check_compiler() - ok = True - try: - self._preprocess(body, headers, include_dirs, lang) - except CompileError: - ok = False - - self._clean() - return ok - - def search_cpp(self, pattern, body=None, headers=None, include_dirs=None, - lang="c"): - """Construct a source file (just like 'try_cpp()'), run it through - the preprocessor, and return true if any line of the output matches - 'pattern'. 'pattern' should either be a compiled regex object or a - string containing a regex. 
If both 'body' and 'headers' are None, - preprocesses an empty file -- which can be useful to determine the - symbols the preprocessor and compiler set by default. - """ - self._check_compiler() - src, out = self._preprocess(body, headers, include_dirs, lang) - - if isinstance(pattern, str): - pattern = re.compile(pattern) - - with open(out) as file: - match = False - while True: - line = file.readline() - if line == '': - break - if pattern.search(line): - match = True - break - - self._clean() - return match - - def try_compile(self, body, headers=None, include_dirs=None, lang="c"): - """Try to compile a source file built from 'body' and 'headers'. - Return true on success, false otherwise. - """ - from distutils.ccompiler import CompileError - self._check_compiler() - try: - self._compile(body, headers, include_dirs, lang) - ok = True - except CompileError: - ok = False - - log.info(ok and "success!" or "failure.") - self._clean() - return ok - - def try_link(self, body, headers=None, include_dirs=None, libraries=None, - library_dirs=None, lang="c"): - """Try to compile and link a source file, built from 'body' and - 'headers', to executable form. Return true on success, false - otherwise. - """ - from distutils.ccompiler import CompileError, LinkError - self._check_compiler() - try: - self._link(body, headers, include_dirs, - libraries, library_dirs, lang) - ok = True - except (CompileError, LinkError): - ok = False - - log.info(ok and "success!" or "failure.") - self._clean() - return ok - - def try_run(self, body, headers=None, include_dirs=None, libraries=None, - library_dirs=None, lang="c"): - """Try to compile, link to an executable, and run a program - built from 'body' and 'headers'. Return true on success, false - otherwise. - """ - from distutils.ccompiler import CompileError, LinkError - self._check_compiler() - try: - src, obj, exe = self._link(body, headers, include_dirs, - libraries, library_dirs, lang) - self.spawn([exe]) - ok = True - except (CompileError, LinkError, DistutilsExecError): - ok = False - - log.info(ok and "success!" or "failure.") - self._clean() - return ok - - - # -- High-level methods -------------------------------------------- - # (these are the ones that are actually likely to be useful - # when implementing a real-world config command!) - - def check_func(self, func, headers=None, include_dirs=None, - libraries=None, library_dirs=None, decl=0, call=0): - """Determine if function 'func' is available by constructing a - source file that refers to 'func', and compiles and links it. - If everything succeeds, returns true; otherwise returns false. - - The constructed source file starts out by including the header - files listed in 'headers'. If 'decl' is true, it then declares - 'func' (as "int func()"); you probably shouldn't supply 'headers' - and set 'decl' true in the same call, or you might get errors about - a conflicting declarations for 'func'. Finally, the constructed - 'main()' function either references 'func' or (if 'call' is true) - calls it. 'libraries' and 'library_dirs' are used when - linking. 
- """ - self._check_compiler() - body = [] - if decl: - body.append("int %s ();" % func) - body.append("int main () {") - if call: - body.append(" %s();" % func) - else: - body.append(" %s;" % func) - body.append("}") - body = "\n".join(body) + "\n" - - return self.try_link(body, headers, include_dirs, - libraries, library_dirs) - - def check_lib(self, library, library_dirs=None, headers=None, - include_dirs=None, other_libraries=[]): - """Determine if 'library' is available to be linked against, - without actually checking that any particular symbols are provided - by it. 'headers' will be used in constructing the source file to - be compiled, but the only effect of this is to check if all the - header files listed are available. Any libraries listed in - 'other_libraries' will be included in the link, in case 'library' - has symbols that depend on other libraries. - """ - self._check_compiler() - return self.try_link("int main (void) { }", headers, include_dirs, - [library] + other_libraries, library_dirs) - - def check_header(self, header, include_dirs=None, library_dirs=None, - lang="c"): - """Determine if the system header file named by 'header_file' - exists and can be found by the preprocessor; return true if so, - false otherwise. - """ - return self.try_cpp(body="/* No body */", headers=[header], - include_dirs=include_dirs) - -def dump_file(filename, head=None): - """Dumps a file content into log.info. - - If head is not None, will be dumped before the file content. - """ - if head is None: - log.info('%s', filename) - else: - log.info(head) - file = open(filename) - try: - log.info(file.read()) - finally: - file.close() diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py deleted file mode 100644 index a22a5d094d7678..00000000000000 --- a/Lib/distutils/command/install.py +++ /dev/null @@ -1,679 +0,0 @@ -"""distutils.command.install - -Implements the Distutils 'install' command.""" - -import sys -import sysconfig -import os -import re - -from distutils import log -from distutils.core import Command -from distutils.debug import DEBUG -from distutils.sysconfig import get_config_vars -from distutils.errors import DistutilsPlatformError -from distutils.file_util import write_file -from distutils.util import convert_path, subst_vars, change_root -from distutils.util import get_platform -from distutils.errors import DistutilsOptionError - -from site import USER_BASE -from site import USER_SITE - -HAS_USER_SITE = (USER_SITE is not None) - -# The keys to an installation scheme; if any new types of files are to be -# installed, be sure to add an entry to every scheme in -# sysconfig._INSTALL_SCHEMES, and to SCHEME_KEYS here. -SCHEME_KEYS = ('purelib', 'platlib', 'headers', 'scripts', 'data') - -# The following code provides backward-compatible INSTALL_SCHEMES -# while making the sysconfig module the single point of truth. -# This makes it easier for OS distributions where they need to -# alter locations for packages installations in a single place. -# Note that this module is deprecated (PEP 632); all consumers -# of this information should switch to using sysconfig directly. 
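The sysconfig calls that the comment above points to as the replacement for these scheme tables, using only stdlib APIs (get_default_scheme() needs Python 3.10 or newer):

import sysconfig

print(sysconfig.get_default_scheme())            # e.g. 'posix_prefix' or 'nt'
paths = sysconfig.get_paths()                    # expanded paths for the default scheme
for key in ('purelib', 'platlib', 'scripts', 'data', 'include'):
    print(key, '->', paths[key])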
-INSTALL_SCHEMES = {"unix_prefix": {}, "unix_home": {}, "nt": {}} - -# Copy from sysconfig._INSTALL_SCHEMES -for key in SCHEME_KEYS: - for distutils_scheme_name, sys_scheme_name in ( - ("unix_prefix", "posix_prefix"), ("unix_home", "posix_home"), - ("nt", "nt")): - sys_key = key - sys_scheme = sysconfig._INSTALL_SCHEMES[sys_scheme_name] - if key == "headers" and key not in sys_scheme: - # On POSIX-y platforms, Python will: - # - Build from .h files in 'headers' (only there when - # building CPython) - # - Install .h files to 'include' - # When 'headers' is missing, fall back to 'include' - sys_key = 'include' - INSTALL_SCHEMES[distutils_scheme_name][key] = sys_scheme[sys_key] - -# Transformation to different template format -for main_key in INSTALL_SCHEMES: - for key, value in INSTALL_SCHEMES[main_key].items(): - # Change all ocurences of {variable} to $variable - value = re.sub(r"\{(.+?)\}", r"$\g<1>", value) - value = value.replace("$installed_base", "$base") - value = value.replace("$py_version_nodot_plat", "$py_version_nodot") - if key == "headers": - value += "/$dist_name" - if sys.version_info >= (3, 9) and key == "platlib": - # platlibdir is available since 3.9: bpo-1294959 - value = value.replace("/lib/", "/$platlibdir/") - INSTALL_SCHEMES[main_key][key] = value - -# The following part of INSTALL_SCHEMES has a different definition -# than the one in sysconfig, but because both depend on the site module, -# the outcomes should be the same. -if HAS_USER_SITE: - INSTALL_SCHEMES['nt_user'] = { - 'purelib': '$usersite', - 'platlib': '$usersite', - 'headers': '$userbase/Python$py_version_nodot/Include/$dist_name', - 'scripts': '$userbase/Python$py_version_nodot/Scripts', - 'data' : '$userbase', - } - - INSTALL_SCHEMES['unix_user'] = { - 'purelib': '$usersite', - 'platlib': '$usersite', - 'headers': - '$userbase/include/python$py_version_short$abiflags/$dist_name', - 'scripts': '$userbase/bin', - 'data' : '$userbase', - } - - -class install(Command): - - description = "install everything from build directory" - - user_options = [ - # Select installation scheme and set base director(y|ies) - ('prefix=', None, - "installation prefix"), - ('exec-prefix=', None, - "(Unix only) prefix for platform-specific files"), - ('home=', None, - "(Unix only) home directory to install under"), - - # Or, just set the base director(y|ies) - ('install-base=', None, - "base installation directory (instead of --prefix or --home)"), - ('install-platbase=', None, - "base installation directory for platform-specific files " + - "(instead of --exec-prefix or --home)"), - ('root=', None, - "install everything relative to this alternate root directory"), - - # Or, explicitly set the installation scheme - ('install-purelib=', None, - "installation directory for pure Python module distributions"), - ('install-platlib=', None, - "installation directory for non-pure module distributions"), - ('install-lib=', None, - "installation directory for all module distributions " + - "(overrides --install-purelib and --install-platlib)"), - - ('install-headers=', None, - "installation directory for C/C++ headers"), - ('install-scripts=', None, - "installation directory for Python scripts"), - ('install-data=', None, - "installation directory for data files"), - - # Byte-compilation options -- see install_lib.py for details, as - # these are duplicated from there (but only install_lib does - # anything with them). 
- ('compile', 'c', "compile .py to .pyc [default]"), - ('no-compile', None, "don't compile .py files"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), - - # Miscellaneous control options - ('force', 'f', - "force installation (overwrite any existing files)"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - - # Where to install documentation (eventually!) - #('doc-format=', None, "format of documentation to generate"), - #('install-man=', None, "directory for Unix man pages"), - #('install-html=', None, "directory for HTML documentation"), - #('install-info=', None, "directory for GNU info files"), - - ('record=', None, - "filename in which to record list of installed files"), - ] - - boolean_options = ['compile', 'force', 'skip-build'] - - if HAS_USER_SITE: - user_options.append(('user', None, - "install in user site-package '%s'" % USER_SITE)) - boolean_options.append('user') - - negative_opt = {'no-compile' : 'compile'} - - - def initialize_options(self): - """Initializes options.""" - # High-level options: these select both an installation base - # and scheme. - self.prefix = None - self.exec_prefix = None - self.home = None - self.user = 0 - - # These select only the installation base; it's up to the user to - # specify the installation scheme (currently, that means supplying - # the --install-{platlib,purelib,scripts,data} options). - self.install_base = None - self.install_platbase = None - self.root = None - - # These options are the actual installation directories; if not - # supplied by the user, they are filled in using the installation - # scheme implied by prefix/exec-prefix/home and the contents of - # that installation scheme. - self.install_purelib = None # for pure module distributions - self.install_platlib = None # non-pure (dists w/ extensions) - self.install_headers = None # for C/C++ headers - self.install_lib = None # set to either purelib or platlib - self.install_scripts = None - self.install_data = None - if HAS_USER_SITE: - self.install_userbase = USER_BASE - self.install_usersite = USER_SITE - - self.compile = None - self.optimize = None - - # Deprecated - # These two are for putting non-packagized distributions into their - # own directory and creating a .pth file if it makes sense. - # 'extra_path' comes from the setup file; 'install_path_file' can - # be turned off if it makes no sense to install a .pth file. (But - # better to install it uselessly than to guess wrong and not - # install it when it's necessary and would be used!) Currently, - # 'install_path_file' is always true unless some outsider meddles - # with it. - self.extra_path = None - self.install_path_file = 1 - - # 'force' forces installation, even if target files are not - # out-of-date. 'skip_build' skips running the "build" command, - # handy if you know it's not necessary. 'warn_dir' (which is *not* - # a user option, it's just there so the bdist_* commands can turn - # it off) determines whether we warn about installing to a - # directory not in sys.path. - self.force = 0 - self.skip_build = 0 - self.warn_dir = 1 - - # These are only here as a conduit from the 'build' command to the - # 'install_*' commands that do the real work. ('build_base' isn't - # actually used anywhere, but it might be useful in future.) They - # are not user options, because if the user told the install - # command where the build directory is, that wouldn't affect the - # build command. 
- self.build_base = None - self.build_lib = None - - # Not defined yet because we don't know anything about - # documentation yet. - #self.install_man = None - #self.install_html = None - #self.install_info = None - - self.record = None - - - # -- Option finalizing methods ------------------------------------- - # (This is rather more involved than for most commands, - # because this is where the policy for installing third- - # party Python modules on various platforms given a wide - # array of user input is decided. Yes, it's quite complex!) - - def finalize_options(self): - """Finalizes options.""" - # This method (and its helpers, like 'finalize_unix()', - # 'finalize_other()', and 'select_scheme()') is where the default - # installation directories for modules, extension modules, and - # anything else we care to install from a Python module - # distribution. Thus, this code makes a pretty important policy - # statement about how third-party stuff is added to a Python - # installation! Note that the actual work of installation is done - # by the relatively simple 'install_*' commands; they just take - # their orders from the installation directory options determined - # here. - - # Check for errors/inconsistencies in the options; first, stuff - # that's wrong on any platform. - - if ((self.prefix or self.exec_prefix or self.home) and - (self.install_base or self.install_platbase)): - raise DistutilsOptionError( - "must supply either prefix/exec-prefix/home or " + - "install-base/install-platbase -- not both") - - if self.home and (self.prefix or self.exec_prefix): - raise DistutilsOptionError( - "must supply either home or prefix/exec-prefix -- not both") - - if self.user and (self.prefix or self.exec_prefix or self.home or - self.install_base or self.install_platbase): - raise DistutilsOptionError("can't combine user with prefix, " - "exec_prefix/home, or install_(plat)base") - - # Next, stuff that's wrong (or dubious) only on certain platforms. - if os.name != "posix": - if self.exec_prefix: - self.warn("exec-prefix option ignored on this platform") - self.exec_prefix = None - - # Now the interesting logic -- so interesting that we farm it out - # to other methods. The goal of these methods is to set the final - # values for the install_{lib,scripts,data,...} options, using as - # input a heady brew of prefix, exec_prefix, home, install_base, - # install_platbase, user-supplied versions of - # install_{purelib,platlib,lib,scripts,data,...}, and the - # INSTALL_SCHEME dictionary above. Phew! - - self.dump_dirs("pre-finalize_{unix,other}") - - if os.name == 'posix': - self.finalize_unix() - else: - self.finalize_other() - - self.dump_dirs("post-finalize_{unix,other}()") - - # Expand configuration variables, tilde, etc. in self.install_base - # and self.install_platbase -- that way, we can use $base or - # $platbase in the other installation directories and not worry - # about needing recursive variable expansion (shudder). - - py_version = sys.version.split()[0] - (prefix, exec_prefix) = get_config_vars('prefix', 'exec_prefix') - try: - abiflags = sys.abiflags - except AttributeError: - # sys.abiflags may not be defined on all platforms. 
- abiflags = '' - self.config_vars = {'dist_name': self.distribution.get_name(), - 'dist_version': self.distribution.get_version(), - 'dist_fullname': self.distribution.get_fullname(), - 'py_version': py_version, - 'py_version_short': '%d.%d' % sys.version_info[:2], - 'py_version_nodot': '%d%d' % sys.version_info[:2], - 'sys_prefix': prefix, - 'prefix': prefix, - 'sys_exec_prefix': exec_prefix, - 'exec_prefix': exec_prefix, - 'abiflags': abiflags, - 'platlibdir': sys.platlibdir, - } - - if HAS_USER_SITE: - self.config_vars['userbase'] = self.install_userbase - self.config_vars['usersite'] = self.install_usersite - - if sysconfig.is_python_build(): - self.config_vars['srcdir'] = sysconfig.get_config_var('srcdir') - - self.expand_basedirs() - - self.dump_dirs("post-expand_basedirs()") - - # Now define config vars for the base directories so we can expand - # everything else. - self.config_vars['base'] = self.install_base - self.config_vars['platbase'] = self.install_platbase - - if DEBUG: - from pprint import pprint - print("config vars:") - pprint(self.config_vars) - - # Expand "~" and configuration variables in the installation - # directories. - self.expand_dirs() - - self.dump_dirs("post-expand_dirs()") - - # Create directories in the home dir: - if self.user: - self.create_home_path() - - # Pick the actual directory to install all modules to: either - # install_purelib or install_platlib, depending on whether this - # module distribution is pure or not. Of course, if the user - # already specified install_lib, use their selection. - if self.install_lib is None: - if self.distribution.ext_modules: # has extensions: non-pure - self.install_lib = self.install_platlib - else: - self.install_lib = self.install_purelib - - - # Convert directories from Unix /-separated syntax to the local - # convention. - self.convert_paths('lib', 'purelib', 'platlib', - 'scripts', 'data', 'headers') - if HAS_USER_SITE: - self.convert_paths('userbase', 'usersite') - - # Deprecated - # Well, we're not actually fully completely finalized yet: we still - # have to deal with 'extra_path', which is the hack for allowing - # non-packagized module distributions (hello, Numerical Python!) to - # get their own directories. - self.handle_extra_path() - self.install_libbase = self.install_lib # needed for .pth file - self.install_lib = os.path.join(self.install_lib, self.extra_dirs) - - # If a new root directory was supplied, make all the installation - # dirs relative to it. - if self.root is not None: - self.change_roots('libbase', 'lib', 'purelib', 'platlib', - 'scripts', 'data', 'headers') - - self.dump_dirs("after prepending root") - - # Find out the build directories, ie. where to install from. - self.set_undefined_options('build', - ('build_base', 'build_base'), - ('build_lib', 'build_lib')) - - # Punt on doc directories for now -- after all, we're punting on - # documentation completely! 
- - def dump_dirs(self, msg): - """Dumps the list of user options.""" - if not DEBUG: - return - from distutils.fancy_getopt import longopt_xlate - log.debug(msg + ":") - for opt in self.user_options: - opt_name = opt[0] - if opt_name[-1] == "=": - opt_name = opt_name[0:-1] - if opt_name in self.negative_opt: - opt_name = self.negative_opt[opt_name] - opt_name = opt_name.translate(longopt_xlate) - val = not getattr(self, opt_name) - else: - opt_name = opt_name.translate(longopt_xlate) - val = getattr(self, opt_name) - log.debug(" %s: %s", opt_name, val) - - def finalize_unix(self): - """Finalizes options for posix platforms.""" - if self.install_base is not None or self.install_platbase is not None: - if ((self.install_lib is None and - self.install_purelib is None and - self.install_platlib is None) or - self.install_headers is None or - self.install_scripts is None or - self.install_data is None): - raise DistutilsOptionError( - "install-base or install-platbase supplied, but " - "installation scheme is incomplete") - return - - if self.user: - if self.install_userbase is None: - raise DistutilsPlatformError( - "User base directory is not specified") - self.install_base = self.install_platbase = self.install_userbase - self.select_scheme("unix_user") - elif self.home is not None: - self.install_base = self.install_platbase = self.home - self.select_scheme("unix_home") - else: - if self.prefix is None: - if self.exec_prefix is not None: - raise DistutilsOptionError( - "must not supply exec-prefix without prefix") - - self.prefix = os.path.normpath(sys.prefix) - self.exec_prefix = os.path.normpath(sys.exec_prefix) - - else: - if self.exec_prefix is None: - self.exec_prefix = self.prefix - - self.install_base = self.prefix - self.install_platbase = self.exec_prefix - self.select_scheme("unix_prefix") - - def finalize_other(self): - """Finalizes options for non-posix platforms""" - if self.user: - if self.install_userbase is None: - raise DistutilsPlatformError( - "User base directory is not specified") - self.install_base = self.install_platbase = self.install_userbase - self.select_scheme(os.name + "_user") - elif self.home is not None: - self.install_base = self.install_platbase = self.home - self.select_scheme("unix_home") - else: - if self.prefix is None: - self.prefix = os.path.normpath(sys.prefix) - - self.install_base = self.install_platbase = self.prefix - try: - self.select_scheme(os.name) - except KeyError: - raise DistutilsPlatformError( - "I don't know how to install stuff on '%s'" % os.name) - - def select_scheme(self, name): - """Sets the install directories by applying the install schemes.""" - # it's the caller's problem if they supply a bad name! 
- scheme = INSTALL_SCHEMES[name] - for key in SCHEME_KEYS: - attrname = 'install_' + key - if getattr(self, attrname) is None: - setattr(self, attrname, scheme[key]) - - def _expand_attrs(self, attrs): - for attr in attrs: - val = getattr(self, attr) - if val is not None: - if os.name == 'posix' or os.name == 'nt': - val = os.path.expanduser(val) - val = subst_vars(val, self.config_vars) - setattr(self, attr, val) - - def expand_basedirs(self): - """Calls `os.path.expanduser` on install_base, install_platbase and - root.""" - self._expand_attrs(['install_base', 'install_platbase', 'root']) - - def expand_dirs(self): - """Calls `os.path.expanduser` on install dirs.""" - self._expand_attrs(['install_purelib', 'install_platlib', - 'install_lib', 'install_headers', - 'install_scripts', 'install_data',]) - - def convert_paths(self, *names): - """Call `convert_path` over `names`.""" - for name in names: - attr = "install_" + name - setattr(self, attr, convert_path(getattr(self, attr))) - - def handle_extra_path(self): - """Set `path_file` and `extra_dirs` using `extra_path`.""" - if self.extra_path is None: - self.extra_path = self.distribution.extra_path - - if self.extra_path is not None: - log.warn( - "Distribution option extra_path is deprecated. " - "See issue27919 for details." - ) - if isinstance(self.extra_path, str): - self.extra_path = self.extra_path.split(',') - - if len(self.extra_path) == 1: - path_file = extra_dirs = self.extra_path[0] - elif len(self.extra_path) == 2: - path_file, extra_dirs = self.extra_path - else: - raise DistutilsOptionError( - "'extra_path' option must be a list, tuple, or " - "comma-separated string with 1 or 2 elements") - - # convert to local form in case Unix notation used (as it - # should be in setup scripts) - extra_dirs = convert_path(extra_dirs) - else: - path_file = None - extra_dirs = '' - - # XXX should we warn if path_file and not extra_dirs? (in which - # case the path file would be harmless but pointless) - self.path_file = path_file - self.extra_dirs = extra_dirs - - def change_roots(self, *names): - """Change the install directories pointed by name using root.""" - for name in names: - attr = "install_" + name - setattr(self, attr, change_root(self.root, getattr(self, attr))) - - def create_home_path(self): - """Create directories under ~.""" - if not self.user: - return - home = convert_path(os.path.expanduser("~")) - for name, path in self.config_vars.items(): - if path.startswith(home) and not os.path.isdir(path): - self.debug_print("os.makedirs('%s', 0o700)" % path) - os.makedirs(path, 0o700) - - # -- Command execution methods ------------------------------------- - - def run(self): - """Runs the command.""" - # Obviously have to build before we can install - if not self.skip_build: - self.run_command('build') - # If we built for any other platform, we can't install. - build_plat = self.distribution.get_command_obj('build').plat_name - # check warn_dir - it is a clue that the 'install' is happening - # internally, and not to sys.path, so we don't check the platform - # matches what we are running. - if self.warn_dir and build_plat != get_platform(): - raise DistutilsPlatformError("Can't install when " - "cross-compiling") - - # Run all sub-commands (at least those that need to be run) - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - if self.path_file: - self.create_path_file() - - # write list of installed files, if requested. 
- if self.record: - outputs = self.get_outputs() - if self.root: # strip any package prefix - root_len = len(self.root) - for counter in range(len(outputs)): - outputs[counter] = outputs[counter][root_len:] - self.execute(write_file, - (self.record, outputs), - "writing list of installed files to '%s'" % - self.record) - - sys_path = map(os.path.normpath, sys.path) - sys_path = map(os.path.normcase, sys_path) - install_lib = os.path.normcase(os.path.normpath(self.install_lib)) - if (self.warn_dir and - not (self.path_file and self.install_path_file) and - install_lib not in sys_path): - log.debug(("modules installed to '%s', which is not in " - "Python's module search path (sys.path) -- " - "you'll have to change the search path yourself"), - self.install_lib) - - def create_path_file(self): - """Creates the .pth file""" - filename = os.path.join(self.install_libbase, - self.path_file + ".pth") - if self.install_path_file: - self.execute(write_file, - (filename, [self.extra_dirs]), - "creating %s" % filename) - else: - self.warn("path file '%s' not created" % filename) - - - # -- Reporting methods --------------------------------------------- - - def get_outputs(self): - """Assembles the outputs of all the sub-commands.""" - outputs = [] - for cmd_name in self.get_sub_commands(): - cmd = self.get_finalized_command(cmd_name) - # Add the contents of cmd.get_outputs(), ensuring - # that outputs doesn't contain duplicate entries - for filename in cmd.get_outputs(): - if filename not in outputs: - outputs.append(filename) - - if self.path_file and self.install_path_file: - outputs.append(os.path.join(self.install_libbase, - self.path_file + ".pth")) - - return outputs - - def get_inputs(self): - """Returns the inputs of all the sub-commands""" - # XXX gee, this looks familiar ;-( - inputs = [] - for cmd_name in self.get_sub_commands(): - cmd = self.get_finalized_command(cmd_name) - inputs.extend(cmd.get_inputs()) - - return inputs - - # -- Predicates for sub-command list ------------------------------- - - def has_lib(self): - """Returns true if the current distribution has any Python - modules to install.""" - return (self.distribution.has_pure_modules() or - self.distribution.has_ext_modules()) - - def has_headers(self): - """Returns true if the current distribution has any headers to - install.""" - return self.distribution.has_headers() - - def has_scripts(self): - """Returns true if the current distribution has any scripts to. - install.""" - return self.distribution.has_scripts() - - def has_data(self): - """Returns true if the current distribution has any data to. - install.""" - return self.distribution.has_data_files() - - # 'sub_commands': a list of commands this command might have to run to - # get its work done. See cmd.py for more info. 
- sub_commands = [('install_lib', has_lib), - ('install_headers', has_headers), - ('install_scripts', has_scripts), - ('install_data', has_data), - ('install_egg_info', lambda self:True), - ] diff --git a/Lib/distutils/command/install_data.py b/Lib/distutils/command/install_data.py deleted file mode 100644 index 947cd76a99e5fd..00000000000000 --- a/Lib/distutils/command/install_data.py +++ /dev/null @@ -1,79 +0,0 @@ -"""distutils.command.install_data - -Implements the Distutils 'install_data' command, for installing -platform-independent data files.""" - -# contributed by Bastian Kleineidam - -import os -from distutils.core import Command -from distutils.util import change_root, convert_path - -class install_data(Command): - - description = "install data files" - - user_options = [ - ('install-dir=', 'd', - "base directory for installing data files " - "(default: installation base dir)"), - ('root=', None, - "install everything relative to this alternate root directory"), - ('force', 'f', "force installation (overwrite existing files)"), - ] - - boolean_options = ['force'] - - def initialize_options(self): - self.install_dir = None - self.outfiles = [] - self.root = None - self.force = 0 - self.data_files = self.distribution.data_files - self.warn_dir = 1 - - def finalize_options(self): - self.set_undefined_options('install', - ('install_data', 'install_dir'), - ('root', 'root'), - ('force', 'force'), - ) - - def run(self): - self.mkpath(self.install_dir) - for f in self.data_files: - if isinstance(f, str): - # it's a simple file, so copy it - f = convert_path(f) - if self.warn_dir: - self.warn("setup script did not provide a directory for " - "'%s' -- installing right in '%s'" % - (f, self.install_dir)) - (out, _) = self.copy_file(f, self.install_dir) - self.outfiles.append(out) - else: - # it's a tuple with path to install to and a list of files - dir = convert_path(f[0]) - if not os.path.isabs(dir): - dir = os.path.join(self.install_dir, dir) - elif self.root: - dir = change_root(self.root, dir) - self.mkpath(dir) - - if f[1] == []: - # If there are no files listed, the user must be - # trying to create an empty directory, so add the - # directory to the list of output files. - self.outfiles.append(dir) - else: - # Copy files, adding them to the list of output files. 
- for data in f[1]: - data = convert_path(data) - (out, _) = self.copy_file(data, dir) - self.outfiles.append(out) - - def get_inputs(self): - return self.data_files or [] - - def get_outputs(self): - return self.outfiles diff --git a/Lib/distutils/command/install_egg_info.py b/Lib/distutils/command/install_egg_info.py deleted file mode 100644 index 0ddc7367cc608d..00000000000000 --- a/Lib/distutils/command/install_egg_info.py +++ /dev/null @@ -1,77 +0,0 @@ -"""distutils.command.install_egg_info - -Implements the Distutils 'install_egg_info' command, for installing -a package's PKG-INFO metadata.""" - - -from distutils.cmd import Command -from distutils import log, dir_util -import os, sys, re - -class install_egg_info(Command): - """Install an .egg-info file for the package""" - - description = "Install package's PKG-INFO metadata as an .egg-info file" - user_options = [ - ('install-dir=', 'd', "directory to install to"), - ] - - def initialize_options(self): - self.install_dir = None - - def finalize_options(self): - self.set_undefined_options('install_lib',('install_dir','install_dir')) - basename = "%s-%s-py%d.%d.egg-info" % ( - to_filename(safe_name(self.distribution.get_name())), - to_filename(safe_version(self.distribution.get_version())), - *sys.version_info[:2] - ) - self.target = os.path.join(self.install_dir, basename) - self.outputs = [self.target] - - def run(self): - target = self.target - if os.path.isdir(target) and not os.path.islink(target): - dir_util.remove_tree(target, dry_run=self.dry_run) - elif os.path.exists(target): - self.execute(os.unlink,(self.target,),"Removing "+target) - elif not os.path.isdir(self.install_dir): - self.execute(os.makedirs, (self.install_dir,), - "Creating "+self.install_dir) - log.info("Writing %s", target) - if not self.dry_run: - with open(target, 'w', encoding='UTF-8') as f: - self.distribution.metadata.write_pkg_file(f) - - def get_outputs(self): - return self.outputs - - -# The following routines are taken from setuptools' pkg_resources module and -# can be replaced by importing them from pkg_resources once it is included -# in the stdlib. - -def safe_name(name): - """Convert an arbitrary string to a standard distribution name - - Any runs of non-alphanumeric/. characters are replaced with a single '-'. - """ - return re.sub('[^A-Za-z0-9.]+', '-', name) - - -def safe_version(version): - """Convert an arbitrary string to a standard version string - - Spaces become dots, and all other non-alphanumeric characters become - dashes, with runs of multiple dashes condensed to a single dash. - """ - version = version.replace(' ','.') - return re.sub('[^A-Za-z0-9.]+', '-', version) - - -def to_filename(name): - """Convert a project or version name to its filename-escaped form - - Any '-' characters are currently replaced with '_'. 
- """ - return name.replace('-','_') diff --git a/Lib/distutils/command/install_headers.py b/Lib/distutils/command/install_headers.py deleted file mode 100644 index 9bb0b18dc0d809..00000000000000 --- a/Lib/distutils/command/install_headers.py +++ /dev/null @@ -1,47 +0,0 @@ -"""distutils.command.install_headers - -Implements the Distutils 'install_headers' command, to install C/C++ header -files to the Python include directory.""" - -from distutils.core import Command - - -# XXX force is never used -class install_headers(Command): - - description = "install C/C++ header files" - - user_options = [('install-dir=', 'd', - "directory to install header files to"), - ('force', 'f', - "force installation (overwrite existing files)"), - ] - - boolean_options = ['force'] - - def initialize_options(self): - self.install_dir = None - self.force = 0 - self.outfiles = [] - - def finalize_options(self): - self.set_undefined_options('install', - ('install_headers', 'install_dir'), - ('force', 'force')) - - - def run(self): - headers = self.distribution.headers - if not headers: - return - - self.mkpath(self.install_dir) - for header in headers: - (out, _) = self.copy_file(header, self.install_dir) - self.outfiles.append(out) - - def get_inputs(self): - return self.distribution.headers or [] - - def get_outputs(self): - return self.outfiles diff --git a/Lib/distutils/command/install_lib.py b/Lib/distutils/command/install_lib.py deleted file mode 100644 index 6154cf09431f72..00000000000000 --- a/Lib/distutils/command/install_lib.py +++ /dev/null @@ -1,217 +0,0 @@ -"""distutils.command.install_lib - -Implements the Distutils 'install_lib' command -(install all Python modules).""" - -import os -import importlib.util -import sys - -from distutils.core import Command -from distutils.errors import DistutilsOptionError - - -# Extension for Python source files. -PYTHON_SOURCE_EXTENSION = ".py" - -class install_lib(Command): - - description = "install all Python modules (extensions and pure Python)" - - # The byte-compilation options are a tad confusing. Here are the - # possible scenarios: - # 1) no compilation at all (--no-compile --no-optimize) - # 2) compile .pyc only (--compile --no-optimize; default) - # 3) compile .pyc and "opt-1" .pyc (--compile --optimize) - # 4) compile "opt-1" .pyc only (--no-compile --optimize) - # 5) compile .pyc and "opt-2" .pyc (--compile --optimize-more) - # 6) compile "opt-2" .pyc only (--no-compile --optimize-more) - # - # The UI for this is two options, 'compile' and 'optimize'. - # 'compile' is strictly boolean, and only decides whether to - # generate .pyc files. 'optimize' is three-way (0, 1, or 2), and - # decides both whether to generate .pyc files and what level of - # optimization to use. 
- - user_options = [ - ('install-dir=', 'd', "directory to install to"), - ('build-dir=','b', "build directory (where to install from)"), - ('force', 'f', "force installation (overwrite existing files)"), - ('compile', 'c', "compile .py to .pyc [default]"), - ('no-compile', None, "don't compile .py files"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), - ('skip-build', None, "skip the build steps"), - ] - - boolean_options = ['force', 'compile', 'skip-build'] - negative_opt = {'no-compile' : 'compile'} - - def initialize_options(self): - # let the 'install' command dictate our installation directory - self.install_dir = None - self.build_dir = None - self.force = 0 - self.compile = None - self.optimize = None - self.skip_build = None - - def finalize_options(self): - # Get all the information we need to install pure Python modules - # from the umbrella 'install' command -- build (source) directory, - # install (target) directory, and whether to compile .py files. - self.set_undefined_options('install', - ('build_lib', 'build_dir'), - ('install_lib', 'install_dir'), - ('force', 'force'), - ('compile', 'compile'), - ('optimize', 'optimize'), - ('skip_build', 'skip_build'), - ) - - if self.compile is None: - self.compile = True - if self.optimize is None: - self.optimize = False - - if not isinstance(self.optimize, int): - try: - self.optimize = int(self.optimize) - if self.optimize not in (0, 1, 2): - raise AssertionError - except (ValueError, AssertionError): - raise DistutilsOptionError("optimize must be 0, 1, or 2") - - def run(self): - # Make sure we have built everything we need first - self.build() - - # Install everything: simply dump the entire contents of the build - # directory to the installation directory (that's the beauty of - # having a build directory!) - outfiles = self.install() - - # (Optionally) compile .py to .pyc - if outfiles is not None and self.distribution.has_pure_modules(): - self.byte_compile(outfiles) - - # -- Top-level worker functions ------------------------------------ - # (called from 'run()') - - def build(self): - if not self.skip_build: - if self.distribution.has_pure_modules(): - self.run_command('build_py') - if self.distribution.has_ext_modules(): - self.run_command('build_ext') - - def install(self): - if os.path.isdir(self.build_dir): - outfiles = self.copy_tree(self.build_dir, self.install_dir) - else: - self.warn("'%s' does not exist -- no Python modules to install" % - self.build_dir) - return - return outfiles - - def byte_compile(self, files): - if sys.dont_write_bytecode: - self.warn('byte-compiling is disabled, skipping.') - return - - from distutils.util import byte_compile - - # Get the "--root" directory supplied to the "install" command, - # and use it as a prefix to strip off the purported filename - # encoded in bytecode files. This is far from complete, but it - # should at least generate usable bytecode in RPM distributions. 
- install_root = self.get_finalized_command('install').root - - if self.compile: - byte_compile(files, optimize=0, - force=self.force, prefix=install_root, - dry_run=self.dry_run) - if self.optimize > 0: - byte_compile(files, optimize=self.optimize, - force=self.force, prefix=install_root, - verbose=self.verbose, dry_run=self.dry_run) - - - # -- Utility methods ----------------------------------------------- - - def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir): - if not has_any: - return [] - - build_cmd = self.get_finalized_command(build_cmd) - build_files = build_cmd.get_outputs() - build_dir = getattr(build_cmd, cmd_option) - - prefix_len = len(build_dir) + len(os.sep) - outputs = [] - for file in build_files: - outputs.append(os.path.join(output_dir, file[prefix_len:])) - - return outputs - - def _bytecode_filenames(self, py_filenames): - bytecode_files = [] - for py_file in py_filenames: - # Since build_py handles package data installation, the - # list of outputs can contain more than just .py files. - # Make sure we only report bytecode for the .py files. - ext = os.path.splitext(os.path.normcase(py_file))[1] - if ext != PYTHON_SOURCE_EXTENSION: - continue - if self.compile: - bytecode_files.append(importlib.util.cache_from_source( - py_file, optimization='')) - if self.optimize > 0: - bytecode_files.append(importlib.util.cache_from_source( - py_file, optimization=self.optimize)) - - return bytecode_files - - - # -- External interface -------------------------------------------- - # (called by outsiders) - - def get_outputs(self): - """Return the list of files that would be installed if this command - were actually run. Not affected by the "dry-run" flag or whether - modules have actually been built yet. - """ - pure_outputs = \ - self._mutate_outputs(self.distribution.has_pure_modules(), - 'build_py', 'build_lib', - self.install_dir) - if self.compile: - bytecode_outputs = self._bytecode_filenames(pure_outputs) - else: - bytecode_outputs = [] - - ext_outputs = \ - self._mutate_outputs(self.distribution.has_ext_modules(), - 'build_ext', 'build_lib', - self.install_dir) - - return pure_outputs + bytecode_outputs + ext_outputs - - def get_inputs(self): - """Get the list of files that are input to this command, ie. the - files that get installed as they are named in the build tree. - The files in this list correspond one-to-one to the output - filenames returned by 'get_outputs()'. 
- """ - inputs = [] - - if self.distribution.has_pure_modules(): - build_py = self.get_finalized_command('build_py') - inputs.extend(build_py.get_outputs()) - - if self.distribution.has_ext_modules(): - build_ext = self.get_finalized_command('build_ext') - inputs.extend(build_ext.get_outputs()) - - return inputs diff --git a/Lib/distutils/command/install_scripts.py b/Lib/distutils/command/install_scripts.py deleted file mode 100644 index 31a1130ee54937..00000000000000 --- a/Lib/distutils/command/install_scripts.py +++ /dev/null @@ -1,60 +0,0 @@ -"""distutils.command.install_scripts - -Implements the Distutils 'install_scripts' command, for installing -Python scripts.""" - -# contributed by Bastian Kleineidam - -import os -from distutils.core import Command -from distutils import log -from stat import ST_MODE - - -class install_scripts(Command): - - description = "install scripts (Python or otherwise)" - - user_options = [ - ('install-dir=', 'd', "directory to install scripts to"), - ('build-dir=','b', "build directory (where to install from)"), - ('force', 'f', "force installation (overwrite existing files)"), - ('skip-build', None, "skip the build steps"), - ] - - boolean_options = ['force', 'skip-build'] - - def initialize_options(self): - self.install_dir = None - self.force = 0 - self.build_dir = None - self.skip_build = None - - def finalize_options(self): - self.set_undefined_options('build', ('build_scripts', 'build_dir')) - self.set_undefined_options('install', - ('install_scripts', 'install_dir'), - ('force', 'force'), - ('skip_build', 'skip_build'), - ) - - def run(self): - if not self.skip_build: - self.run_command('build_scripts') - self.outfiles = self.copy_tree(self.build_dir, self.install_dir) - if os.name == 'posix': - # Set the executable bits (owner, group, and world) on - # all the scripts we just installed. - for file in self.get_outputs(): - if self.dry_run: - log.info("changing mode of %s", file) - else: - mode = ((os.stat(file)[ST_MODE]) | 0o555) & 0o7777 - log.info("changing mode of %s to %o", file, mode) - os.chmod(file, mode) - - def get_inputs(self): - return self.distribution.scripts or [] - - def get_outputs(self): - return self.outfiles or [] diff --git a/Lib/distutils/command/register.py b/Lib/distutils/command/register.py deleted file mode 100644 index 170f5497141c9d..00000000000000 --- a/Lib/distutils/command/register.py +++ /dev/null @@ -1,304 +0,0 @@ -"""distutils.command.register - -Implements the Distutils 'register' command (register with the repository). 
-""" - -# created 2002/10/21, Richard Jones - -import getpass -import io -import urllib.parse, urllib.request -from warnings import warn - -from distutils.core import PyPIRCCommand -from distutils.errors import * -from distutils import log - -class register(PyPIRCCommand): - - description = ("register the distribution with the Python package index") - user_options = PyPIRCCommand.user_options + [ - ('list-classifiers', None, - 'list the valid Trove classifiers'), - ('strict', None , - 'Will stop the registering if the meta-data are not fully compliant') - ] - boolean_options = PyPIRCCommand.boolean_options + [ - 'verify', 'list-classifiers', 'strict'] - - sub_commands = [('check', lambda self: True)] - - def initialize_options(self): - PyPIRCCommand.initialize_options(self) - self.list_classifiers = 0 - self.strict = 0 - - def finalize_options(self): - PyPIRCCommand.finalize_options(self) - # setting options for the `check` subcommand - check_options = {'strict': ('register', self.strict), - 'restructuredtext': ('register', 1)} - self.distribution.command_options['check'] = check_options - - def run(self): - self.finalize_options() - self._set_config() - - # Run sub commands - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - if self.dry_run: - self.verify_metadata() - elif self.list_classifiers: - self.classifiers() - else: - self.send_metadata() - - def check_metadata(self): - """Deprecated API.""" - warn("distutils.command.register.check_metadata is deprecated, \ - use the check command instead", PendingDeprecationWarning) - check = self.distribution.get_command_obj('check') - check.ensure_finalized() - check.strict = self.strict - check.restructuredtext = 1 - check.run() - - def _set_config(self): - ''' Reads the configuration file and set attributes. - ''' - config = self._read_pypirc() - if config != {}: - self.username = config['username'] - self.password = config['password'] - self.repository = config['repository'] - self.realm = config['realm'] - self.has_config = True - else: - if self.repository not in ('pypi', self.DEFAULT_REPOSITORY): - raise ValueError('%s not found in .pypirc' % self.repository) - if self.repository == 'pypi': - self.repository = self.DEFAULT_REPOSITORY - self.has_config = False - - def classifiers(self): - ''' Fetch the list of classifiers from the server. - ''' - url = self.repository+'?:action=list_classifiers' - response = urllib.request.urlopen(url) - log.info(self._read_pypi_response(response)) - - def verify_metadata(self): - ''' Send the metadata to the package index server to be checked. - ''' - # send the info to the server and report the result - (code, result) = self.post_to_server(self.build_post_data('verify')) - log.info('Server response (%s): %s', code, result) - - def send_metadata(self): - ''' Send the metadata to the package index server. - - Well, do the following: - 1. figure who the user is, and then - 2. send the data as a Basic auth'ed POST. - - First we try to read the username/password from $HOME/.pypirc, - which is a ConfigParser-formatted file with a section - [distutils] containing username and password entries (both - in clear text). Eg: - - [distutils] - index-servers = - pypi - - [pypi] - username: fred - password: sekrit - - Otherwise, to figure who the user is, we offer the user three - choices: - - 1. use existing login, - 2. register as a new user, or - 3. set the password to a random string and email the user. 
- - ''' - # see if we can short-cut and get the username/password from the - # config - if self.has_config: - choice = '1' - username = self.username - password = self.password - else: - choice = 'x' - username = password = '' - - # get the user's login info - choices = '1 2 3 4'.split() - while choice not in choices: - self.announce('''\ -We need to know who you are, so please choose either: - 1. use your existing login, - 2. register as a new user, - 3. have the server generate a new password for you (and email it to you), or - 4. quit -Your selection [default 1]: ''', log.INFO) - choice = input() - if not choice: - choice = '1' - elif choice not in choices: - print('Please choose one of the four options!') - - if choice == '1': - # get the username and password - while not username: - username = input('Username: ') - while not password: - password = getpass.getpass('Password: ') - - # set up the authentication - auth = urllib.request.HTTPPasswordMgr() - host = urllib.parse.urlparse(self.repository)[1] - auth.add_password(self.realm, host, username, password) - # send the info to the server and report the result - code, result = self.post_to_server(self.build_post_data('submit'), - auth) - self.announce('Server response (%s): %s' % (code, result), - log.INFO) - - # possibly save the login - if code == 200: - if self.has_config: - # sharing the password in the distribution instance - # so the upload command can reuse it - self.distribution.password = password - else: - self.announce(('I can store your PyPI login so future ' - 'submissions will be faster.'), log.INFO) - self.announce('(the login will be stored in %s)' % \ - self._get_rc_file(), log.INFO) - choice = 'X' - while choice.lower() not in 'yn': - choice = input('Save your login (y/N)?') - if not choice: - choice = 'n' - if choice.lower() == 'y': - self._store_pypirc(username, password) - - elif choice == '2': - data = {':action': 'user'} - data['name'] = data['password'] = data['email'] = '' - data['confirm'] = None - while not data['name']: - data['name'] = input('Username: ') - while data['password'] != data['confirm']: - while not data['password']: - data['password'] = getpass.getpass('Password: ') - while not data['confirm']: - data['confirm'] = getpass.getpass(' Confirm: ') - if data['password'] != data['confirm']: - data['password'] = '' - data['confirm'] = None - print("Password and confirm don't match!") - while not data['email']: - data['email'] = input(' EMail: ') - code, result = self.post_to_server(data) - if code != 200: - log.info('Server response (%s): %s', code, result) - else: - log.info('You will receive an email shortly.') - log.info(('Follow the instructions in it to ' - 'complete registration.')) - elif choice == '3': - data = {':action': 'password_reset'} - data['email'] = '' - while not data['email']: - data['email'] = input('Your email address: ') - code, result = self.post_to_server(data) - log.info('Server response (%s): %s', code, result) - - def build_post_data(self, action): - # figure the data to send - the metadata plus some additional - # information used by the package server - meta = self.distribution.metadata - data = { - ':action': action, - 'metadata_version' : '1.0', - 'name': meta.get_name(), - 'version': meta.get_version(), - 'summary': meta.get_description(), - 'home_page': meta.get_url(), - 'author': meta.get_contact(), - 'author_email': meta.get_contact_email(), - 'license': meta.get_licence(), - 'description': meta.get_long_description(), - 'keywords': meta.get_keywords(), - 'platform': 
meta.get_platforms(), - 'classifiers': meta.get_classifiers(), - 'download_url': meta.get_download_url(), - # PEP 314 - 'provides': meta.get_provides(), - 'requires': meta.get_requires(), - 'obsoletes': meta.get_obsoletes(), - } - if data['provides'] or data['requires'] or data['obsoletes']: - data['metadata_version'] = '1.1' - return data - - def post_to_server(self, data, auth=None): - ''' Post a query to the server, and return a string response. - ''' - if 'name' in data: - self.announce('Registering %s to %s' % (data['name'], - self.repository), - log.INFO) - # Build up the MIME payload for the urllib2 POST data - boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' - sep_boundary = '\n--' + boundary - end_boundary = sep_boundary + '--' - body = io.StringIO() - for key, value in data.items(): - # handle multiple entries for the same name - if not isinstance(value, (list, tuple)): - value = [value] - for value in value: - value = str(value) - body.write(sep_boundary) - body.write('\nContent-Disposition: form-data; name="%s"'%key) - body.write("\n\n") - body.write(value) - if value and value[-1] == '\r': - body.write('\n') # write an extra newline (lurve Macs) - body.write(end_boundary) - body.write("\n") - body = body.getvalue().encode("utf-8") - - # build the Request - headers = { - 'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'%boundary, - 'Content-length': str(len(body)) - } - req = urllib.request.Request(self.repository, body, headers) - - # handle HTTP and include the Basic Auth handler - opener = urllib.request.build_opener( - urllib.request.HTTPBasicAuthHandler(password_mgr=auth) - ) - data = '' - try: - result = opener.open(req) - except urllib.error.HTTPError as e: - if self.show_response: - data = e.fp.read() - result = e.code, e.msg - except urllib.error.URLError as e: - result = 500, str(e) - else: - if self.show_response: - data = self._read_pypi_response(result) - result = 200, 'OK' - if self.show_response: - msg = '\n'.join(('-' * 75, data, '-' * 75)) - self.announce(msg, log.INFO) - return result diff --git a/Lib/distutils/command/sdist.py b/Lib/distutils/command/sdist.py deleted file mode 100644 index b4996fcb1d276c..00000000000000 --- a/Lib/distutils/command/sdist.py +++ /dev/null @@ -1,494 +0,0 @@ -"""distutils.command.sdist - -Implements the Distutils 'sdist' command (create a source distribution).""" - -import os -import sys -from glob import glob -from warnings import warn - -from distutils.core import Command -from distutils import dir_util -from distutils import file_util -from distutils import archive_util -from distutils.text_file import TextFile -from distutils.filelist import FileList -from distutils import log -from distutils.util import convert_path -from distutils.errors import DistutilsTemplateError, DistutilsOptionError - - -def show_formats(): - """Print all possible values for the 'formats' option (used by - the "--help-formats" command-line option). - """ - from distutils.fancy_getopt import FancyGetopt - from distutils.archive_util import ARCHIVE_FORMATS - formats = [] - for format in ARCHIVE_FORMATS.keys(): - formats.append(("formats=" + format, None, - ARCHIVE_FORMATS[format][2])) - formats.sort() - FancyGetopt(formats).print_help( - "List of available source distribution formats:") - - -class sdist(Command): - - description = "create a source distribution (tarball, zip file, etc.)" - - def checking_metadata(self): - """Callable used for the check sub-command. 
- - Placed here so user_options can view it""" - return self.metadata_check - - user_options = [ - ('template=', 't', - "name of manifest template file [default: MANIFEST.in]"), - ('manifest=', 'm', - "name of manifest file [default: MANIFEST]"), - ('use-defaults', None, - "include the default file set in the manifest " - "[default; disable with --no-defaults]"), - ('no-defaults', None, - "don't include the default file set"), - ('prune', None, - "specifically exclude files/directories that should not be " - "distributed (build tree, RCS/CVS dirs, etc.) " - "[default; disable with --no-prune]"), - ('no-prune', None, - "don't automatically exclude anything"), - ('manifest-only', 'o', - "just regenerate the manifest and then stop " - "(implies --force-manifest)"), - ('force-manifest', 'f', - "forcibly regenerate the manifest and carry on as usual. " - "Deprecated: now the manifest is always regenerated."), - ('formats=', None, - "formats for source distribution (comma-separated list)"), - ('keep-temp', 'k', - "keep the distribution tree around after creating " + - "archive file(s)"), - ('dist-dir=', 'd', - "directory to put the source distribution archive(s) in " - "[default: dist]"), - ('metadata-check', None, - "Ensure that all required elements of meta-data " - "are supplied. Warn if any missing. [default]"), - ('owner=', 'u', - "Owner name used when creating a tar file [default: current user]"), - ('group=', 'g', - "Group name used when creating a tar file [default: current group]"), - ] - - boolean_options = ['use-defaults', 'prune', - 'manifest-only', 'force-manifest', - 'keep-temp', 'metadata-check'] - - help_options = [ - ('help-formats', None, - "list available distribution formats", show_formats), - ] - - negative_opt = {'no-defaults': 'use-defaults', - 'no-prune': 'prune' } - - sub_commands = [('check', checking_metadata)] - - READMES = ('README', 'README.txt', 'README.rst') - - def initialize_options(self): - # 'template' and 'manifest' are, respectively, the names of - # the manifest template and manifest file. - self.template = None - self.manifest = None - - # 'use_defaults': if true, we will include the default file set - # in the manifest - self.use_defaults = 1 - self.prune = 1 - - self.manifest_only = 0 - self.force_manifest = 0 - - self.formats = ['gztar'] - self.keep_temp = 0 - self.dist_dir = None - - self.archive_files = None - self.metadata_check = 1 - self.owner = None - self.group = None - - def finalize_options(self): - if self.manifest is None: - self.manifest = "MANIFEST" - if self.template is None: - self.template = "MANIFEST.in" - - self.ensure_string_list('formats') - - bad_format = archive_util.check_archive_formats(self.formats) - if bad_format: - raise DistutilsOptionError( - "unknown archive format '%s'" % bad_format) - - if self.dist_dir is None: - self.dist_dir = "dist" - - def run(self): - # 'filelist' contains the list of files that will make up the - # manifest - self.filelist = FileList() - - # Run sub commands - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - # Do whatever it takes to get the list of files to process - # (process the manifest template, read an existing manifest, - # whatever). File list is accumulated in 'self.filelist'. - self.get_file_list() - - # If user just wanted us to regenerate the manifest, stop now. - if self.manifest_only: - return - - # Otherwise, go ahead and create the source distribution tarball, - # or zipfile, or whatever. 
- self.make_distribution() - - def check_metadata(self): - """Deprecated API.""" - warn("distutils.command.sdist.check_metadata is deprecated, \ - use the check command instead", PendingDeprecationWarning) - check = self.distribution.get_command_obj('check') - check.ensure_finalized() - check.run() - - def get_file_list(self): - """Figure out the list of files to include in the source - distribution, and put it in 'self.filelist'. This might involve - reading the manifest template (and writing the manifest), or just - reading the manifest, or just using the default file set -- it all - depends on the user's options. - """ - # new behavior when using a template: - # the file list is recalculated every time because - # even if MANIFEST.in or setup.py are not changed - # the user might have added some files in the tree that - # need to be included. - # - # This makes --force the default and only behavior with templates. - template_exists = os.path.isfile(self.template) - if not template_exists and self._manifest_is_not_generated(): - self.read_manifest() - self.filelist.sort() - self.filelist.remove_duplicates() - return - - if not template_exists: - self.warn(("manifest template '%s' does not exist " + - "(using default file list)") % - self.template) - self.filelist.findall() - - if self.use_defaults: - self.add_defaults() - - if template_exists: - self.read_template() - - if self.prune: - self.prune_file_list() - - self.filelist.sort() - self.filelist.remove_duplicates() - self.write_manifest() - - def add_defaults(self): - """Add all the default files to self.filelist: - - README or README.txt - - setup.py - - test/test*.py - - all pure Python modules mentioned in setup script - - all files pointed by package_data (build_py) - - all files defined in data_files. - - all files defined as scripts. - - all C sources listed as part of extensions or C libraries - in the setup script (doesn't catch C headers!) - Warns if (README or README.txt) or setup.py are missing; everything - else is optional. 
- """ - self._add_defaults_standards() - self._add_defaults_optional() - self._add_defaults_python() - self._add_defaults_data_files() - self._add_defaults_ext() - self._add_defaults_c_libs() - self._add_defaults_scripts() - - @staticmethod - def _cs_path_exists(fspath): - """ - Case-sensitive path existence check - - >>> sdist._cs_path_exists(__file__) - True - >>> sdist._cs_path_exists(__file__.upper()) - False - """ - if not os.path.exists(fspath): - return False - # make absolute so we always have a directory - abspath = os.path.abspath(fspath) - directory, filename = os.path.split(abspath) - return filename in os.listdir(directory) - - def _add_defaults_standards(self): - standards = [self.READMES, self.distribution.script_name] - for fn in standards: - if isinstance(fn, tuple): - alts = fn - got_it = False - for fn in alts: - if self._cs_path_exists(fn): - got_it = True - self.filelist.append(fn) - break - - if not got_it: - self.warn("standard file not found: should have one of " + - ', '.join(alts)) - else: - if self._cs_path_exists(fn): - self.filelist.append(fn) - else: - self.warn("standard file '%s' not found" % fn) - - def _add_defaults_optional(self): - optional = ['test/test*.py', 'setup.cfg'] - for pattern in optional: - files = filter(os.path.isfile, glob(pattern)) - self.filelist.extend(files) - - def _add_defaults_python(self): - # build_py is used to get: - # - python modules - # - files defined in package_data - build_py = self.get_finalized_command('build_py') - - # getting python files - if self.distribution.has_pure_modules(): - self.filelist.extend(build_py.get_source_files()) - - # getting package_data files - # (computed in build_py.data_files by build_py.finalize_options) - for pkg, src_dir, build_dir, filenames in build_py.data_files: - for filename in filenames: - self.filelist.append(os.path.join(src_dir, filename)) - - def _add_defaults_data_files(self): - # getting distribution.data_files - if self.distribution.has_data_files(): - for item in self.distribution.data_files: - if isinstance(item, str): - # plain file - item = convert_path(item) - if os.path.isfile(item): - self.filelist.append(item) - else: - # a (dirname, filenames) tuple - dirname, filenames = item - for f in filenames: - f = convert_path(f) - if os.path.isfile(f): - self.filelist.append(f) - - def _add_defaults_ext(self): - if self.distribution.has_ext_modules(): - build_ext = self.get_finalized_command('build_ext') - self.filelist.extend(build_ext.get_source_files()) - - def _add_defaults_c_libs(self): - if self.distribution.has_c_libraries(): - build_clib = self.get_finalized_command('build_clib') - self.filelist.extend(build_clib.get_source_files()) - - def _add_defaults_scripts(self): - if self.distribution.has_scripts(): - build_scripts = self.get_finalized_command('build_scripts') - self.filelist.extend(build_scripts.get_source_files()) - - def read_template(self): - """Read and parse manifest template file named by self.template. - - (usually "MANIFEST.in") The parsing and processing is done by - 'self.filelist', which updates itself accordingly. 
- """ - log.info("reading manifest template '%s'", self.template) - template = TextFile(self.template, strip_comments=1, skip_blanks=1, - join_lines=1, lstrip_ws=1, rstrip_ws=1, - collapse_join=1) - - try: - while True: - line = template.readline() - if line is None: # end of file - break - - try: - self.filelist.process_template_line(line) - # the call above can raise a DistutilsTemplateError for - # malformed lines, or a ValueError from the lower-level - # convert_path function - except (DistutilsTemplateError, ValueError) as msg: - self.warn("%s, line %d: %s" % (template.filename, - template.current_line, - msg)) - finally: - template.close() - - def prune_file_list(self): - """Prune off branches that might slip into the file list as created - by 'read_template()', but really don't belong there: - * the build tree (typically "build") - * the release tree itself (only an issue if we ran "sdist" - previously with --keep-temp, or it aborted) - * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories - """ - build = self.get_finalized_command('build') - base_dir = self.distribution.get_fullname() - - self.filelist.exclude_pattern(None, prefix=build.build_base) - self.filelist.exclude_pattern(None, prefix=base_dir) - - if sys.platform == 'win32': - seps = r'/|\\' - else: - seps = '/' - - vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr', - '_darcs'] - vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps) - self.filelist.exclude_pattern(vcs_ptrn, is_regex=1) - - def write_manifest(self): - """Write the file list in 'self.filelist' (presumably as filled in - by 'add_defaults()' and 'read_template()') to the manifest file - named by 'self.manifest'. - """ - if self._manifest_is_not_generated(): - log.info("not writing to manually maintained " - "manifest file '%s'" % self.manifest) - return - - content = self.filelist.files[:] - content.insert(0, '# file GENERATED by distutils, do NOT edit') - self.execute(file_util.write_file, (self.manifest, content), - "writing manifest file '%s'" % self.manifest) - - def _manifest_is_not_generated(self): - # check for special comment used in 3.1.3 and higher - if not os.path.isfile(self.manifest): - return False - - fp = open(self.manifest) - try: - first_line = fp.readline() - finally: - fp.close() - return first_line != '# file GENERATED by distutils, do NOT edit\n' - - def read_manifest(self): - """Read the manifest file (named by 'self.manifest') and use it to - fill in 'self.filelist', the list of files to include in the source - distribution. - """ - log.info("reading manifest file '%s'", self.manifest) - with open(self.manifest) as manifest: - for line in manifest: - # ignore comments and blank lines - line = line.strip() - if line.startswith('#') or not line: - continue - self.filelist.append(line) - - def make_release_tree(self, base_dir, files): - """Create the directory tree that will become the source - distribution archive. All directories implied by the filenames in - 'files' are created under 'base_dir', and then we hard link or copy - (if hard linking is unavailable) those files into place. - Essentially, this duplicates the developer's source tree, but in a - directory named after the distribution, containing only the files - to be distributed. - """ - # Create all the directories under 'base_dir' necessary to - # put 'files' there; the 'mkpath()' is just so we don't die - # if the manifest happens to be empty. 
- self.mkpath(base_dir) - dir_util.create_tree(base_dir, files, dry_run=self.dry_run) - - # And walk over the list of files, either making a hard link (if - # os.link exists) to each one that doesn't already exist in its - # corresponding location under 'base_dir', or copying each file - # that's out-of-date in 'base_dir'. (Usually, all files will be - # out-of-date, because by default we blow away 'base_dir' when - # we're done making the distribution archives.) - - if hasattr(os, 'link'): # can make hard links on this system - link = 'hard' - msg = "making hard links in %s..." % base_dir - else: # nope, have to copy - link = None - msg = "copying files to %s..." % base_dir - - if not files: - log.warn("no files to distribute -- empty manifest?") - else: - log.info(msg) - for file in files: - if not os.path.isfile(file): - log.warn("'%s' not a regular file -- skipping", file) - else: - dest = os.path.join(base_dir, file) - self.copy_file(file, dest, link=link) - - self.distribution.metadata.write_pkg_info(base_dir) - - def make_distribution(self): - """Create the source distribution(s). First, we create the release - tree with 'make_release_tree()'; then, we create all required - archive files (according to 'self.formats') from the release tree. - Finally, we clean up by blowing away the release tree (unless - 'self.keep_temp' is true). The list of archive files created is - stored so it can be retrieved later by 'get_archive_files()'. - """ - # Don't warn about missing meta-data here -- should be (and is!) - # done elsewhere. - base_dir = self.distribution.get_fullname() - base_name = os.path.join(self.dist_dir, base_dir) - - self.make_release_tree(base_dir, self.filelist.files) - archive_files = [] # remember names of files we create - # tar archive must be created last to avoid overwrite and remove - if 'tar' in self.formats: - self.formats.append(self.formats.pop(self.formats.index('tar'))) - - for fmt in self.formats: - file = self.make_archive(base_name, fmt, base_dir=base_dir, - owner=self.owner, group=self.group) - archive_files.append(file) - self.distribution.dist_files.append(('sdist', '', file)) - - self.archive_files = archive_files - - if not self.keep_temp: - dir_util.remove_tree(base_dir, dry_run=self.dry_run) - - def get_archive_files(self): - """Return the list of archive files created when the command - was run, or None if the command hasn't run yet. - """ - return self.archive_files diff --git a/Lib/distutils/command/upload.py b/Lib/distutils/command/upload.py deleted file mode 100644 index e0ecb655b93faf..00000000000000 --- a/Lib/distutils/command/upload.py +++ /dev/null @@ -1,215 +0,0 @@ -""" -distutils.command.upload - -Implements the Distutils 'upload' subcommand (upload package to a package -index). 
-""" - -import os -import io -import hashlib -from base64 import standard_b64encode -from urllib.error import HTTPError -from urllib.request import urlopen, Request -from urllib.parse import urlparse -from distutils.errors import DistutilsError, DistutilsOptionError -from distutils.core import PyPIRCCommand -from distutils.spawn import spawn -from distutils import log - - -# PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256) -# https://bugs.python.org/issue40698 -_FILE_CONTENT_DIGESTS = { - "md5_digest": getattr(hashlib, "md5", None), - "sha256_digest": getattr(hashlib, "sha256", None), - "blake2_256_digest": getattr(hashlib, "blake2b", None), -} - - -class upload(PyPIRCCommand): - - description = "upload binary package to PyPI" - - user_options = PyPIRCCommand.user_options + [ - ('sign', 's', - 'sign files to upload using gpg'), - ('identity=', 'i', 'GPG identity used to sign files'), - ] - - boolean_options = PyPIRCCommand.boolean_options + ['sign'] - - def initialize_options(self): - PyPIRCCommand.initialize_options(self) - self.username = '' - self.password = '' - self.show_response = 0 - self.sign = False - self.identity = None - - def finalize_options(self): - PyPIRCCommand.finalize_options(self) - if self.identity and not self.sign: - raise DistutilsOptionError( - "Must use --sign for --identity to have meaning" - ) - config = self._read_pypirc() - if config != {}: - self.username = config['username'] - self.password = config['password'] - self.repository = config['repository'] - self.realm = config['realm'] - - # getting the password from the distribution - # if previously set by the register command - if not self.password and self.distribution.password: - self.password = self.distribution.password - - def run(self): - if not self.distribution.dist_files: - msg = ("Must create and upload files in one command " - "(e.g. 
setup.py sdist upload)") - raise DistutilsOptionError(msg) - for command, pyversion, filename in self.distribution.dist_files: - self.upload_file(command, pyversion, filename) - - def upload_file(self, command, pyversion, filename): - # Makes sure the repository URL is compliant - schema, netloc, url, params, query, fragments = \ - urlparse(self.repository) - if params or query or fragments: - raise AssertionError("Incompatible url %s" % self.repository) - - if schema not in ('http', 'https'): - raise AssertionError("unsupported schema " + schema) - - # Sign if requested - if self.sign: - gpg_args = ["gpg", "--detach-sign", "-a", filename] - if self.identity: - gpg_args[2:2] = ["--local-user", self.identity] - spawn(gpg_args, - dry_run=self.dry_run) - - # Fill in the data - send all the meta-data in case we need to - # register a new release - f = open(filename,'rb') - try: - content = f.read() - finally: - f.close() - - meta = self.distribution.metadata - data = { - # action - ':action': 'file_upload', - 'protocol_version': '1', - - # identify release - 'name': meta.get_name(), - 'version': meta.get_version(), - - # file content - 'content': (os.path.basename(filename),content), - 'filetype': command, - 'pyversion': pyversion, - - # additional meta-data - 'metadata_version': '1.0', - 'summary': meta.get_description(), - 'home_page': meta.get_url(), - 'author': meta.get_contact(), - 'author_email': meta.get_contact_email(), - 'license': meta.get_licence(), - 'description': meta.get_long_description(), - 'keywords': meta.get_keywords(), - 'platform': meta.get_platforms(), - 'classifiers': meta.get_classifiers(), - 'download_url': meta.get_download_url(), - # PEP 314 - 'provides': meta.get_provides(), - 'requires': meta.get_requires(), - 'obsoletes': meta.get_obsoletes(), - } - - data['comment'] = '' - - # file content digests - for digest_name, digest_cons in _FILE_CONTENT_DIGESTS.items(): - if digest_cons is None: - continue - try: - data[digest_name] = digest_cons(content).hexdigest() - except ValueError: - # hash digest not available or blocked by security policy - pass - - if self.sign: - with open(filename + ".asc", "rb") as f: - data['gpg_signature'] = (os.path.basename(filename) + ".asc", - f.read()) - - # set up the authentication - user_pass = (self.username + ":" + self.password).encode('ascii') - # The exact encoding of the authentication string is debated. - # Anyway PyPI only accepts ascii for both username or password. 
- auth = "Basic " + standard_b64encode(user_pass).decode('ascii') - - # Build up the MIME payload for the POST data - boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' - sep_boundary = b'\r\n--' + boundary.encode('ascii') - end_boundary = sep_boundary + b'--\r\n' - body = io.BytesIO() - for key, value in data.items(): - title = '\r\nContent-Disposition: form-data; name="%s"' % key - # handle multiple entries for the same name - if not isinstance(value, list): - value = [value] - for value in value: - if type(value) is tuple: - title += '; filename="%s"' % value[0] - value = value[1] - else: - value = str(value).encode('utf-8') - body.write(sep_boundary) - body.write(title.encode('utf-8')) - body.write(b"\r\n\r\n") - body.write(value) - body.write(end_boundary) - body = body.getvalue() - - msg = "Submitting %s to %s" % (filename, self.repository) - self.announce(msg, log.INFO) - - # build the Request - headers = { - 'Content-type': 'multipart/form-data; boundary=%s' % boundary, - 'Content-length': str(len(body)), - 'Authorization': auth, - } - - request = Request(self.repository, data=body, - headers=headers) - # send the data - try: - result = urlopen(request) - status = result.getcode() - reason = result.msg - except HTTPError as e: - status = e.code - reason = e.msg - except OSError as e: - self.announce(str(e), log.ERROR) - raise - - if status == 200: - self.announce('Server response (%s): %s' % (status, reason), - log.INFO) - if self.show_response: - text = self._read_pypi_response(result) - msg = '\n'.join(('-' * 75, text, '-' * 75)) - self.announce(msg, log.INFO) - else: - msg = 'Upload failed (%s): %s' % (status, reason) - self.announce(msg, log.ERROR) - raise DistutilsError(msg) diff --git a/Lib/distutils/config.py b/Lib/distutils/config.py deleted file mode 100644 index a201c86a176844..00000000000000 --- a/Lib/distutils/config.py +++ /dev/null @@ -1,133 +0,0 @@ -"""distutils.pypirc - -Provides the PyPIRCCommand class, the base class for the command classes -that uses .pypirc in the distutils.command package. 
-""" -import os -from configparser import RawConfigParser -import warnings - -from distutils.cmd import Command - -DEFAULT_PYPIRC = """\ -[distutils] -index-servers = - pypi - -[pypi] -username:%s -password:%s -""" - -class PyPIRCCommand(Command): - """Base command that knows how to handle the .pypirc file - """ - DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/' - DEFAULT_REALM = 'pypi' - repository = None - realm = None - - user_options = [ - ('repository=', 'r', - "url of repository [default: %s]" % \ - DEFAULT_REPOSITORY), - ('show-response', None, - 'display full response text from server')] - - boolean_options = ['show-response'] - - def _get_rc_file(self): - """Returns rc file path.""" - return os.path.join(os.path.expanduser('~'), '.pypirc') - - def _store_pypirc(self, username, password): - """Creates a default .pypirc file.""" - rc = self._get_rc_file() - with os.fdopen(os.open(rc, os.O_CREAT | os.O_WRONLY, 0o600), 'w') as f: - f.write(DEFAULT_PYPIRC % (username, password)) - - def _read_pypirc(self): - """Reads the .pypirc file.""" - rc = self._get_rc_file() - if os.path.exists(rc): - self.announce('Using PyPI login from %s' % rc) - repository = self.repository or self.DEFAULT_REPOSITORY - - config = RawConfigParser() - config.read(rc) - sections = config.sections() - if 'distutils' in sections: - # let's get the list of servers - index_servers = config.get('distutils', 'index-servers') - _servers = [server.strip() for server in - index_servers.split('\n') - if server.strip() != ''] - if _servers == []: - # nothing set, let's try to get the default pypi - if 'pypi' in sections: - _servers = ['pypi'] - else: - # the file is not properly defined, returning - # an empty dict - return {} - for server in _servers: - current = {'server': server} - current['username'] = config.get(server, 'username') - - # optional params - for key, default in (('repository', - self.DEFAULT_REPOSITORY), - ('realm', self.DEFAULT_REALM), - ('password', None)): - if config.has_option(server, key): - current[key] = config.get(server, key) - else: - current[key] = default - - # work around people having "repository" for the "pypi" - # section of their config set to the HTTP (rather than - # HTTPS) URL - if (server == 'pypi' and - repository in (self.DEFAULT_REPOSITORY, 'pypi')): - current['repository'] = self.DEFAULT_REPOSITORY - return current - - if (current['server'] == repository or - current['repository'] == repository): - return current - elif 'server-login' in sections: - # old format - server = 'server-login' - if config.has_option(server, 'repository'): - repository = config.get(server, 'repository') - else: - repository = self.DEFAULT_REPOSITORY - return {'username': config.get(server, 'username'), - 'password': config.get(server, 'password'), - 'repository': repository, - 'server': server, - 'realm': self.DEFAULT_REALM} - - return {} - - def _read_pypi_response(self, response): - """Read and decode a PyPI HTTP response.""" - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - import cgi - content_type = response.getheader('content-type', 'text/plain') - encoding = cgi.parse_header(content_type)[1].get('charset', 'ascii') - return response.read().decode(encoding) - - def initialize_options(self): - """Initialize options.""" - self.repository = None - self.realm = None - self.show_response = 0 - - def finalize_options(self): - """Finalizes options.""" - if self.repository is None: - self.repository = self.DEFAULT_REPOSITORY - if self.realm is None: - 
self.realm = self.DEFAULT_REALM diff --git a/Lib/distutils/core.py b/Lib/distutils/core.py deleted file mode 100644 index d603d4a45a73ee..00000000000000 --- a/Lib/distutils/core.py +++ /dev/null @@ -1,234 +0,0 @@ -"""distutils.core - -The only module that needs to be imported to use the Distutils; provides -the 'setup' function (which is to be called from the setup script). Also -indirectly provides the Distribution and Command classes, although they are -really defined in distutils.dist and distutils.cmd. -""" - -import os -import sys - -from distutils.debug import DEBUG -from distutils.errors import * - -# Mainly import these so setup scripts can "from distutils.core import" them. -from distutils.dist import Distribution -from distutils.cmd import Command -from distutils.config import PyPIRCCommand -from distutils.extension import Extension - -# This is a barebones help message generated displayed when the user -# runs the setup script with no arguments at all. More useful help -# is generated with various --help options: global help, list commands, -# and per-command help. -USAGE = """\ -usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...] - or: %(script)s --help [cmd1 cmd2 ...] - or: %(script)s --help-commands - or: %(script)s cmd --help -""" - -def gen_usage (script_name): - script = os.path.basename(script_name) - return USAGE % vars() - - -# Some mild magic to control the behaviour of 'setup()' from 'run_setup()'. -_setup_stop_after = None -_setup_distribution = None - -# Legal keyword arguments for the setup() function -setup_keywords = ('distclass', 'script_name', 'script_args', 'options', - 'name', 'version', 'author', 'author_email', - 'maintainer', 'maintainer_email', 'url', 'license', - 'description', 'long_description', 'keywords', - 'platforms', 'classifiers', 'download_url', - 'requires', 'provides', 'obsoletes', - ) - -# Legal keyword arguments for the Extension constructor -extension_keywords = ('name', 'sources', 'include_dirs', - 'define_macros', 'undef_macros', - 'library_dirs', 'libraries', 'runtime_library_dirs', - 'extra_objects', 'extra_compile_args', 'extra_link_args', - 'swig_opts', 'export_symbols', 'depends', 'language') - -def setup (**attrs): - """The gateway to the Distutils: do everything your setup script needs - to do, in a highly flexible and user-driven way. Briefly: create a - Distribution instance; find and parse config files; parse the command - line; run each Distutils command found there, customized by the options - supplied to 'setup()' (as keyword arguments), in config files, and on - the command line. - - The Distribution instance might be an instance of a class supplied via - the 'distclass' keyword argument to 'setup'; if no such class is - supplied, then the Distribution class (in dist.py) is instantiated. - All other arguments to 'setup' (except for 'cmdclass') are used to set - attributes of the Distribution instance. - - The 'cmdclass' argument, if supplied, is a dictionary mapping command - names to command classes. Each command encountered on the command line - will be turned into a command class, which is in turn instantiated; any - class found in 'cmdclass' is used in place of the default, which is - (for command 'foo_bar') class 'foo_bar' in module - 'distutils.command.foo_bar'. The command class must provide a - 'user_options' attribute which is a list of option specifiers for - 'distutils.fancy_getopt'. 
Any command-line options between the current - and the next command are used to set attributes of the current command - object. - - When the entire command-line has been successfully parsed, calls the - 'run()' method on each command object in turn. This method will be - driven entirely by the Distribution object (which each command object - has a reference to, thanks to its constructor), and the - command-specific options that became attributes of each command - object. - """ - - global _setup_stop_after, _setup_distribution - - # Determine the distribution class -- either caller-supplied or - # our Distribution (see below). - klass = attrs.get('distclass') - if klass: - del attrs['distclass'] - else: - klass = Distribution - - if 'script_name' not in attrs: - attrs['script_name'] = os.path.basename(sys.argv[0]) - if 'script_args' not in attrs: - attrs['script_args'] = sys.argv[1:] - - # Create the Distribution instance, using the remaining arguments - # (ie. everything except distclass) to initialize it - try: - _setup_distribution = dist = klass(attrs) - except DistutilsSetupError as msg: - if 'name' not in attrs: - raise SystemExit("error in setup command: %s" % msg) - else: - raise SystemExit("error in %s setup command: %s" % \ - (attrs['name'], msg)) - - if _setup_stop_after == "init": - return dist - - # Find and parse the config file(s): they will override options from - # the setup script, but be overridden by the command line. - dist.parse_config_files() - - if DEBUG: - print("options (after parsing config files):") - dist.dump_option_dicts() - - if _setup_stop_after == "config": - return dist - - # Parse the command line and override config files; any - # command-line errors are the end user's fault, so turn them into - # SystemExit to suppress tracebacks. - try: - ok = dist.parse_command_line() - except DistutilsArgError as msg: - raise SystemExit(gen_usage(dist.script_name) + "\nerror: %s" % msg) - - if DEBUG: - print("options (after parsing command line):") - dist.dump_option_dicts() - - if _setup_stop_after == "commandline": - return dist - - # And finally, run all the commands found on the command line. - if ok: - try: - dist.run_commands() - except KeyboardInterrupt: - raise SystemExit("interrupted") - except OSError as exc: - if DEBUG: - sys.stderr.write("error: %s\n" % (exc,)) - raise - else: - raise SystemExit("error: %s" % (exc,)) - - except (DistutilsError, - CCompilerError) as msg: - if DEBUG: - raise - else: - raise SystemExit("error: " + str(msg)) - - return dist - -# setup () - - -def run_setup (script_name, script_args=None, stop_after="run"): - """Run a setup script in a somewhat controlled environment, and - return the Distribution instance that drives things. This is useful - if you need to find out the distribution meta-data (passed as - keyword args from 'script' to 'setup()', or the contents of the - config files or command-line. - - 'script_name' is a file that will be read and run with 'exec()'; - 'sys.argv[0]' will be replaced with 'script' for the duration of the - call. 'script_args' is a list of strings; if supplied, - 'sys.argv[1:]' will be replaced by 'script_args' for the duration of - the call. 
- - 'stop_after' tells 'setup()' when to stop processing; possible - values: - init - stop after the Distribution instance has been created and - populated with the keyword arguments to 'setup()' - config - stop after config files have been parsed (and their data - stored in the Distribution instance) - commandline - stop after the command-line ('sys.argv[1:]' or 'script_args') - have been parsed (and the data stored in the Distribution) - run [default] - stop after all commands have been run (the same as if 'setup()' - had been called in the usual way - - Returns the Distribution instance, which provides all information - used to drive the Distutils. - """ - if stop_after not in ('init', 'config', 'commandline', 'run'): - raise ValueError("invalid value for 'stop_after': %r" % (stop_after,)) - - global _setup_stop_after, _setup_distribution - _setup_stop_after = stop_after - - save_argv = sys.argv.copy() - g = {'__file__': script_name} - try: - try: - sys.argv[0] = script_name - if script_args is not None: - sys.argv[1:] = script_args - with open(script_name, 'rb') as f: - exec(f.read(), g) - finally: - sys.argv = save_argv - _setup_stop_after = None - except SystemExit: - # Hmm, should we do something if exiting with a non-zero code - # (ie. error)? - pass - - if _setup_distribution is None: - raise RuntimeError(("'distutils.core.setup()' was never called -- " - "perhaps '%s' is not a Distutils setup script?") % \ - script_name) - - # I wonder if the setup script's namespace -- g and l -- would be of - # any interest to callers? - #print "_setup_distribution:", _setup_distribution - return _setup_distribution - -# run_setup () diff --git a/Lib/distutils/cygwinccompiler.py b/Lib/distutils/cygwinccompiler.py deleted file mode 100644 index 66c12dd35830b2..00000000000000 --- a/Lib/distutils/cygwinccompiler.py +++ /dev/null @@ -1,403 +0,0 @@ -"""distutils.cygwinccompiler - -Provides the CygwinCCompiler class, a subclass of UnixCCompiler that -handles the Cygwin port of the GNU C compiler to Windows. It also contains -the Mingw32CCompiler class which handles the mingw32 port of GCC (same as -cygwin in no-cygwin mode). -""" - -# problems: -# -# * if you use a msvc compiled python version (1.5.2) -# 1. you have to insert a __GNUC__ section in its config.h -# 2. you have to generate an import library for its dll -# - create a def-file for python??.dll -# - create an import library using -# dlltool --dllname python15.dll --def python15.def \ -# --output-lib libpython15.a -# -# see also http://starship.python.net/crew/kernr/mingw32/Notes.html -# -# * We put export_symbols in a def-file, and don't use -# --export-all-symbols because it doesn't worked reliable in some -# tested configurations. And because other windows compilers also -# need their symbols specified this no serious problem. -# -# tested configurations: -# -# * cygwin gcc 2.91.57/ld 2.9.4/dllwrap 0.2.4 works -# (after patching python's config.h and for C++ some other include files) -# see also http://starship.python.net/crew/kernr/mingw32/Notes.html -# * mingw32 gcc 2.95.2/ld 2.9.4/dllwrap 0.2.4 works -# (ld doesn't support -shared, so we use dllwrap) -# * cygwin gcc 2.95.2/ld 2.10.90/dllwrap 2.10.90 works now -# - its dllwrap doesn't work, there is a bug in binutils 2.10.90 -# see also http://sources.redhat.com/ml/cygwin/2000-06/msg01274.html -# - using gcc -mdll instead dllwrap doesn't work without -static because -# it tries to link against dlls instead their import libraries. (If -# it finds the dll first.) 
-# By specifying -static we force ld to link against the import libraries, -# this is windows standard and there are normally not the necessary symbols -# in the dlls. -# *** only the version of June 2000 shows these problems -# * cygwin gcc 3.2/ld 2.13.90 works -# (ld supports -shared) -# * mingw gcc 3.2/ld 2.13 works -# (ld supports -shared) - -import os -import sys -import copy -from subprocess import Popen, PIPE, check_output -import re - -from distutils.unixccompiler import UnixCCompiler -from distutils.file_util import write_file -from distutils.errors import (DistutilsExecError, CCompilerError, - CompileError, UnknownFileError) -from distutils.version import LooseVersion -from distutils.spawn import find_executable - -def get_msvcr(): - """Include the appropriate MSVC runtime library if Python was built - with MSVC 7.0 or later. - """ - msc_pos = sys.version.find('MSC v.') - if msc_pos != -1: - msc_ver = sys.version[msc_pos+6:msc_pos+10] - if msc_ver == '1300': - # MSVC 7.0 - return ['msvcr70'] - elif msc_ver == '1310': - # MSVC 7.1 - return ['msvcr71'] - elif msc_ver == '1400': - # VS2005 / MSVC 8.0 - return ['msvcr80'] - elif msc_ver == '1500': - # VS2008 / MSVC 9.0 - return ['msvcr90'] - elif msc_ver == '1600': - # VS2010 / MSVC 10.0 - return ['msvcr100'] - else: - raise ValueError("Unknown MS Compiler version %s " % msc_ver) - - -class CygwinCCompiler(UnixCCompiler): - """ Handles the Cygwin port of the GNU C compiler to Windows. - """ - compiler_type = 'cygwin' - obj_extension = ".o" - static_lib_extension = ".a" - shared_lib_extension = ".dll" - static_lib_format = "lib%s%s" - shared_lib_format = "%s%s" - exe_extension = ".exe" - - def __init__(self, verbose=0, dry_run=0, force=0): - - UnixCCompiler.__init__(self, verbose, dry_run, force) - - status, details = check_config_h() - self.debug_print("Python's GCC status: %s (details: %s)" % - (status, details)) - if status is not CONFIG_H_OK: - self.warn( - "Python's pyconfig.h doesn't seem to support your compiler. " - "Reason: %s. " - "Compiling may fail because of undefined preprocessor macros." - % details) - - self.gcc_version, self.ld_version, self.dllwrap_version = \ - get_versions() - self.debug_print(self.compiler_type + ": gcc %s, ld %s, dllwrap %s\n" % - (self.gcc_version, - self.ld_version, - self.dllwrap_version) ) - - # ld_version >= "2.10.90" and < "2.13" should also be able to use - # gcc -mdll instead of dllwrap - # Older dllwraps had own version numbers, newer ones use the - # same as the rest of binutils ( also ld ) - # dllwrap 2.10.90 is buggy - if self.ld_version >= "2.10.90": - self.linker_dll = "gcc" - else: - self.linker_dll = "dllwrap" - - # ld_version >= "2.13" support -shared so use it instead of - # -mdll -static - if self.ld_version >= "2.13": - shared_option = "-shared" - else: - shared_option = "-mdll -static" - - # Hard-code GCC because that's what this is all about. - # XXX optimization, warnings etc. should be customizable. 
- self.set_executables(compiler='gcc -mcygwin -O -Wall', - compiler_so='gcc -mcygwin -mdll -O -Wall', - compiler_cxx='g++ -mcygwin -O -Wall', - linker_exe='gcc -mcygwin', - linker_so=('%s -mcygwin %s' % - (self.linker_dll, shared_option))) - - # cygwin and mingw32 need different sets of libraries - if self.gcc_version == "2.91.57": - # cygwin shouldn't need msvcrt, but without the dlls will crash - # (gcc version 2.91.57) -- perhaps something about initialization - self.dll_libraries=["msvcrt"] - self.warn( - "Consider upgrading to a newer version of gcc") - else: - # Include the appropriate MSVC runtime library if Python was built - # with MSVC 7.0 or later. - self.dll_libraries = get_msvcr() - - def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): - """Compiles the source by spawning GCC and windres if needed.""" - if ext == '.rc' or ext == '.res': - # gcc needs '.res' and '.rc' compiled to object files !!! - try: - self.spawn(["windres", "-i", src, "-o", obj]) - except DistutilsExecError as msg: - raise CompileError(msg) - else: # for other files use the C-compiler - try: - self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + - extra_postargs) - except DistutilsExecError as msg: - raise CompileError(msg) - - def link(self, target_desc, objects, output_filename, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None): - """Link the objects.""" - # use separate copies, so we can modify the lists - extra_preargs = copy.copy(extra_preargs or []) - libraries = copy.copy(libraries or []) - objects = copy.copy(objects or []) - - # Additional libraries - libraries.extend(self.dll_libraries) - - # handle export symbols by creating a def-file - # with executables this only works with gcc/ld as linker - if ((export_symbols is not None) and - (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")): - # (The linker doesn't do anything if output is up-to-date. - # So it would probably better to check if we really need this, - # but for this we had to insert some unchanged parts of - # UnixCCompiler, and this is not what we want.) 
- - # we want to put some files in the same directory as the - # object files are, build_temp doesn't help much - # where are the object files - temp_dir = os.path.dirname(objects[0]) - # name of dll to give the helper files the same base name - (dll_name, dll_extension) = os.path.splitext( - os.path.basename(output_filename)) - - # generate the filenames for these files - def_file = os.path.join(temp_dir, dll_name + ".def") - lib_file = os.path.join(temp_dir, 'lib' + dll_name + ".a") - - # Generate .def file - contents = [ - "LIBRARY %s" % os.path.basename(output_filename), - "EXPORTS"] - for sym in export_symbols: - contents.append(sym) - self.execute(write_file, (def_file, contents), - "writing %s" % def_file) - - # next add options for def-file and to creating import libraries - - # dllwrap uses different options than gcc/ld - if self.linker_dll == "dllwrap": - extra_preargs.extend(["--output-lib", lib_file]) - # for dllwrap we have to use a special option - extra_preargs.extend(["--def", def_file]) - # we use gcc/ld here and can be sure ld is >= 2.9.10 - else: - # doesn't work: bfd_close build\...\libfoo.a: Invalid operation - #extra_preargs.extend(["-Wl,--out-implib,%s" % lib_file]) - # for gcc/ld the def-file is specified as any object files - objects.append(def_file) - - #end: if ((export_symbols is not None) and - # (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")): - - # who wants symbols and a many times larger output file - # should explicitly switch the debug mode on - # otherwise we let dllwrap/ld strip the output file - # (On my machine: 10KiB < stripped_file < ??100KiB - # unstripped_file = stripped_file + XXX KiB - # ( XXX=254 for a typical python extension)) - if not debug: - extra_preargs.append("-s") - - UnixCCompiler.link(self, target_desc, objects, output_filename, - output_dir, libraries, library_dirs, - runtime_library_dirs, - None, # export_symbols, we do this in our def-file - debug, extra_preargs, extra_postargs, build_temp, - target_lang) - - # -- Miscellaneous methods ----------------------------------------- - - def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): - """Adds supports for rc and res files.""" - if output_dir is None: - output_dir = '' - obj_names = [] - for src_name in source_filenames: - # use normcase to make sure '.rc' is really '.rc' and not '.RC' - base, ext = os.path.splitext(os.path.normcase(src_name)) - if ext not in (self.src_extensions + ['.rc','.res']): - raise UnknownFileError("unknown file type '%s' (from '%s')" % \ - (ext, src_name)) - if strip_dir: - base = os.path.basename (base) - if ext in ('.res', '.rc'): - # these need to be compiled to object files - obj_names.append (os.path.join(output_dir, - base + ext + self.obj_extension)) - else: - obj_names.append (os.path.join(output_dir, - base + self.obj_extension)) - return obj_names - -# the same as cygwin plus some additional parameters -class Mingw32CCompiler(CygwinCCompiler): - """ Handles the Mingw32 port of the GNU C compiler to Windows. - """ - compiler_type = 'mingw32' - - def __init__(self, verbose=0, dry_run=0, force=0): - - CygwinCCompiler.__init__ (self, verbose, dry_run, force) - - # ld_version >= "2.13" support -shared so use it instead of - # -mdll -static - if self.ld_version >= "2.13": - shared_option = "-shared" - else: - shared_option = "-mdll -static" - - # A real mingw32 doesn't need to specify a different entry point, - # but cygwin 2.91.57 in no-cygwin-mode needs it. 
- if self.gcc_version <= "2.91.57": - entry_point = '--entry _DllMain@12' - else: - entry_point = '' - - if is_cygwingcc(): - raise CCompilerError( - 'Cygwin gcc cannot be used with --compiler=mingw32') - - self.set_executables(compiler='gcc -O -Wall', - compiler_so='gcc -mdll -O -Wall', - compiler_cxx='g++ -O -Wall', - linker_exe='gcc', - linker_so='%s %s %s' - % (self.linker_dll, shared_option, - entry_point)) - # Maybe we should also append -mthreads, but then the finished - # dlls need another dll (mingwm10.dll see Mingw32 docs) - # (-mthreads: Support thread-safe exception handling on `Mingw32') - - # no additional libraries needed - self.dll_libraries=[] - - # Include the appropriate MSVC runtime library if Python was built - # with MSVC 7.0 or later. - self.dll_libraries = get_msvcr() - -# Because these compilers aren't configured in Python's pyconfig.h file by -# default, we should at least warn the user if he is using an unmodified -# version. - -CONFIG_H_OK = "ok" -CONFIG_H_NOTOK = "not ok" -CONFIG_H_UNCERTAIN = "uncertain" - -def check_config_h(): - """Check if the current Python installation appears amenable to building - extensions with GCC. - - Returns a tuple (status, details), where 'status' is one of the following - constants: - - - CONFIG_H_OK: all is well, go ahead and compile - - CONFIG_H_NOTOK: doesn't look good - - CONFIG_H_UNCERTAIN: not sure -- unable to read pyconfig.h - - 'details' is a human-readable string explaining the situation. - - Note there are two ways to conclude "OK": either 'sys.version' contains - the string "GCC" (implying that this Python was built with GCC), or the - installed "pyconfig.h" contains the string "__GNUC__". - """ - - # XXX since this function also checks sys.version, it's not strictly a - # "pyconfig.h" check -- should probably be renamed... - - from distutils import sysconfig - - # if sys.version contains GCC then python was compiled with GCC, and the - # pyconfig.h file should be OK - if "GCC" in sys.version: - return CONFIG_H_OK, "sys.version mentions 'GCC'" - - # let's see if __GNUC__ is mentioned in python.h - fn = sysconfig.get_config_h_filename() - try: - config_h = open(fn) - try: - if "__GNUC__" in config_h.read(): - return CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn - else: - return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn - finally: - config_h.close() - except OSError as exc: - return (CONFIG_H_UNCERTAIN, - "couldn't read '%s': %s" % (fn, exc.strerror)) - -RE_VERSION = re.compile(br'(\d+\.\d+(\.\d+)*)') - -def _find_exe_version(cmd): - """Find the version of an executable by running `cmd` in the shell. - - If the command is not found, or the output does not match - `RE_VERSION`, returns None. - """ - executable = cmd.split()[0] - if find_executable(executable) is None: - return None - out = Popen(cmd, shell=True, stdout=PIPE).stdout - try: - out_string = out.read() - finally: - out.close() - result = RE_VERSION.search(out_string) - if result is None: - return None - # LooseVersion works with strings - # so we need to decode our bytes - return LooseVersion(result.group(1).decode()) - -def get_versions(): - """ Try to find out the versions of gcc, ld and dllwrap. - - If not possible it returns None for it. 
- """ - commands = ['gcc -dumpversion', 'ld -v', 'dllwrap --version'] - return tuple([_find_exe_version(cmd) for cmd in commands]) - -def is_cygwingcc(): - '''Try to determine if the gcc that would be used is from cygwin.''' - out_string = check_output(['gcc', '-dumpmachine']) - return out_string.strip().endswith(b'cygwin') diff --git a/Lib/distutils/debug.py b/Lib/distutils/debug.py deleted file mode 100644 index daf1660f0d8211..00000000000000 --- a/Lib/distutils/debug.py +++ /dev/null @@ -1,5 +0,0 @@ -import os - -# If DISTUTILS_DEBUG is anything other than the empty string, we run in -# debug mode. -DEBUG = os.environ.get('DISTUTILS_DEBUG') diff --git a/Lib/distutils/dep_util.py b/Lib/distutils/dep_util.py deleted file mode 100644 index d74f5e4e92f3ed..00000000000000 --- a/Lib/distutils/dep_util.py +++ /dev/null @@ -1,92 +0,0 @@ -"""distutils.dep_util - -Utility functions for simple, timestamp-based dependency of files -and groups of files; also, function based entirely on such -timestamp dependency analysis.""" - -import os -from distutils.errors import DistutilsFileError - - -def newer (source, target): - """Return true if 'source' exists and is more recently modified than - 'target', or if 'source' exists and 'target' doesn't. Return false if - both exist and 'target' is the same age or younger than 'source'. - Raise DistutilsFileError if 'source' does not exist. - """ - if not os.path.exists(source): - raise DistutilsFileError("file '%s' does not exist" % - os.path.abspath(source)) - if not os.path.exists(target): - return 1 - - from stat import ST_MTIME - mtime1 = os.stat(source)[ST_MTIME] - mtime2 = os.stat(target)[ST_MTIME] - - return mtime1 > mtime2 - -# newer () - - -def newer_pairwise (sources, targets): - """Walk two filename lists in parallel, testing if each source is newer - than its corresponding target. Return a pair of lists (sources, - targets) where source is newer than target, according to the semantics - of 'newer()'. - """ - if len(sources) != len(targets): - raise ValueError("'sources' and 'targets' must be same length") - - # build a pair of lists (sources, targets) where source is newer - n_sources = [] - n_targets = [] - for i in range(len(sources)): - if newer(sources[i], targets[i]): - n_sources.append(sources[i]) - n_targets.append(targets[i]) - - return (n_sources, n_targets) - -# newer_pairwise () - - -def newer_group (sources, target, missing='error'): - """Return true if 'target' is out-of-date with respect to any file - listed in 'sources'. In other words, if 'target' exists and is newer - than every file in 'sources', return false; otherwise return true. - 'missing' controls what we do when a source file is missing; the - default ("error") is to blow up with an OSError from inside 'stat()'; - if it is "ignore", we silently drop any missing source files; if it is - "newer", any missing source files make us assume that 'target' is - out-of-date (this is handy in "dry-run" mode: it'll make you pretend to - carry out commands that wouldn't work because inputs are missing, but - that doesn't matter because you're not actually going to run the - commands). - """ - # If the target doesn't even exist, then it's definitely out-of-date. - if not os.path.exists(target): - return 1 - - # Otherwise we have to find out the hard way: if *any* source file - # is more recent than 'target', then 'target' is out-of-date and - # we can immediately return true. If we fall through to the end - # of the loop, then 'target' is up-to-date and we return false. 
- from stat import ST_MTIME - target_mtime = os.stat(target)[ST_MTIME] - for source in sources: - if not os.path.exists(source): - if missing == 'error': # blow up when we stat() the file - pass - elif missing == 'ignore': # missing source dropped from - continue # target's dependency list - elif missing == 'newer': # missing source means target is - return 1 # out-of-date - - source_mtime = os.stat(source)[ST_MTIME] - if source_mtime > target_mtime: - return 1 - else: - return 0 - -# newer_group () diff --git a/Lib/distutils/dir_util.py b/Lib/distutils/dir_util.py deleted file mode 100644 index d5cd8e3e24f46a..00000000000000 --- a/Lib/distutils/dir_util.py +++ /dev/null @@ -1,210 +0,0 @@ -"""distutils.dir_util - -Utility functions for manipulating directories and directory trees.""" - -import os -import errno -from distutils.errors import DistutilsFileError, DistutilsInternalError -from distutils import log - -# cache for by mkpath() -- in addition to cheapening redundant calls, -# eliminates redundant "creating /foo/bar/baz" messages in dry-run mode -_path_created = {} - -# I don't use os.makedirs because a) it's new to Python 1.5.2, and -# b) it blows up if the directory already exists (I want to silently -# succeed in that case). -def mkpath(name, mode=0o777, verbose=1, dry_run=0): - """Create a directory and any missing ancestor directories. - - If the directory already exists (or if 'name' is the empty string, which - means the current directory, which of course exists), then do nothing. - Raise DistutilsFileError if unable to create some directory along the way - (eg. some sub-path exists, but is a file rather than a directory). - If 'verbose' is true, print a one-line summary of each mkdir to stdout. - Return the list of directories actually created. - """ - - global _path_created - - # Detect a common bug -- name is None - if not isinstance(name, str): - raise DistutilsInternalError( - "mkpath: 'name' must be a string (got %r)" % (name,)) - - # XXX what's the better way to handle verbosity? print as we create - # each directory in the path (the current behaviour), or only announce - # the creation of the whole path? (quite easy to do the latter since - # we're not using a recursive algorithm) - - name = os.path.normpath(name) - created_dirs = [] - if os.path.isdir(name) or name == '': - return created_dirs - if _path_created.get(os.path.abspath(name)): - return created_dirs - - (head, tail) = os.path.split(name) - tails = [tail] # stack of lone dirs to create - - while head and tail and not os.path.isdir(head): - (head, tail) = os.path.split(head) - tails.insert(0, tail) # push next higher dir onto stack - - # now 'head' contains the deepest directory that already exists - # (that is, the child of 'head' in 'name' is the highest directory - # that does *not* exist) - for d in tails: - #print "head = %s, d = %s: " % (head, d), - head = os.path.join(head, d) - abs_head = os.path.abspath(head) - - if _path_created.get(abs_head): - continue - - if verbose >= 1: - log.info("creating %s", head) - - if not dry_run: - try: - os.mkdir(head, mode) - except OSError as exc: - if not (exc.errno == errno.EEXIST and os.path.isdir(head)): - raise DistutilsFileError( - "could not create '%s': %s" % (head, exc.args[-1])) - created_dirs.append(head) - - _path_created[abs_head] = 1 - return created_dirs - -def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0): - """Create all the empty directories under 'base_dir' needed to put 'files' - there. 
- - 'base_dir' is just the name of a directory which doesn't necessarily - exist yet; 'files' is a list of filenames to be interpreted relative to - 'base_dir'. 'base_dir' + the directory portion of every file in 'files' - will be created if it doesn't already exist. 'mode', 'verbose' and - 'dry_run' flags are as for 'mkpath()'. - """ - # First get the list of directories to create - need_dir = set() - for file in files: - need_dir.add(os.path.join(base_dir, os.path.dirname(file))) - - # Now create them - for dir in sorted(need_dir): - mkpath(dir, mode, verbose=verbose, dry_run=dry_run) - -def copy_tree(src, dst, preserve_mode=1, preserve_times=1, - preserve_symlinks=0, update=0, verbose=1, dry_run=0): - """Copy an entire directory tree 'src' to a new location 'dst'. - - Both 'src' and 'dst' must be directory names. If 'src' is not a - directory, raise DistutilsFileError. If 'dst' does not exist, it is - created with 'mkpath()'. The end result of the copy is that every - file in 'src' is copied to 'dst', and directories under 'src' are - recursively copied to 'dst'. Return the list of files that were - copied or might have been copied, using their output name. The - return value is unaffected by 'update' or 'dry_run': it is simply - the list of all files under 'src', with the names changed to be - under 'dst'. - - 'preserve_mode' and 'preserve_times' are the same as for - 'copy_file'; note that they only apply to regular files, not to - directories. If 'preserve_symlinks' is true, symlinks will be - copied as symlinks (on platforms that support them!); otherwise - (the default), the destination of the symlink will be copied. - 'update' and 'verbose' are the same as for 'copy_file'. - """ - from distutils.file_util import copy_file - - if not dry_run and not os.path.isdir(src): - raise DistutilsFileError( - "cannot copy tree '%s': not a directory" % src) - try: - names = os.listdir(src) - except OSError as e: - if dry_run: - names = [] - else: - raise DistutilsFileError( - "error listing files in '%s': %s" % (src, e.strerror)) - - if not dry_run: - mkpath(dst, verbose=verbose) - - outputs = [] - - for n in names: - src_name = os.path.join(src, n) - dst_name = os.path.join(dst, n) - - if n.startswith('.nfs'): - # skip NFS rename files - continue - - if preserve_symlinks and os.path.islink(src_name): - link_dest = os.readlink(src_name) - if verbose >= 1: - log.info("linking %s -> %s", dst_name, link_dest) - if not dry_run: - os.symlink(link_dest, dst_name) - outputs.append(dst_name) - - elif os.path.isdir(src_name): - outputs.extend( - copy_tree(src_name, dst_name, preserve_mode, - preserve_times, preserve_symlinks, update, - verbose=verbose, dry_run=dry_run)) - else: - copy_file(src_name, dst_name, preserve_mode, - preserve_times, update, verbose=verbose, - dry_run=dry_run) - outputs.append(dst_name) - - return outputs - -def _build_cmdtuple(path, cmdtuples): - """Helper for remove_tree().""" - for f in os.listdir(path): - real_f = os.path.join(path,f) - if os.path.isdir(real_f) and not os.path.islink(real_f): - _build_cmdtuple(real_f, cmdtuples) - else: - cmdtuples.append((os.remove, real_f)) - cmdtuples.append((os.rmdir, path)) - -def remove_tree(directory, verbose=1, dry_run=0): - """Recursively remove an entire directory tree. - - Any errors are ignored (apart from being reported to stdout if 'verbose' - is true). 
- """ - global _path_created - - if verbose >= 1: - log.info("removing '%s' (and everything under it)", directory) - if dry_run: - return - cmdtuples = [] - _build_cmdtuple(directory, cmdtuples) - for cmd in cmdtuples: - try: - cmd[0](cmd[1]) - # remove dir from cache if it's already there - abspath = os.path.abspath(cmd[1]) - if abspath in _path_created: - del _path_created[abspath] - except OSError as exc: - log.warn("error removing %s: %s", directory, exc) - -def ensure_relative(path): - """Take the full path 'path', and make it a relative path. - - This is useful to make 'path' the second argument to os.path.join(). - """ - drive, path = os.path.splitdrive(path) - if path[0:1] == os.sep: - path = drive + path[1:] - return path diff --git a/Lib/distutils/dist.py b/Lib/distutils/dist.py deleted file mode 100644 index 6cf0a0d6632dc7..00000000000000 --- a/Lib/distutils/dist.py +++ /dev/null @@ -1,1256 +0,0 @@ -"""distutils.dist - -Provides the Distribution class, which represents the module distribution -being built/installed/distributed. -""" - -import sys -import os -import re -from email import message_from_file - -try: - import warnings -except ImportError: - warnings = None - -from distutils.errors import * -from distutils.fancy_getopt import FancyGetopt, translate_longopt -from distutils.util import check_environ, strtobool, rfc822_escape -from distutils import log -from distutils.debug import DEBUG - -# Regex to define acceptable Distutils command names. This is not *quite* -# the same as a Python NAME -- I don't allow leading underscores. The fact -# that they're very similar is no coincidence; the default naming scheme is -# to look for a Python module named after the command. -command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$') - - -def _ensure_list(value, fieldname): - if isinstance(value, str): - # a string containing comma separated values is okay. It will - # be converted to a list by Distribution.finalize_options(). - pass - elif not isinstance(value, list): - # passing a tuple or an iterator perhaps, warn and convert - typename = type(value).__name__ - msg = f"Warning: '{fieldname}' should be a list, got type '{typename}'" - log.log(log.WARN, msg) - value = list(value) - return value - - -class Distribution: - """The core of the Distutils. Most of the work hiding behind 'setup' - is really done within a Distribution instance, which farms the work out - to the Distutils commands specified on the command line. - - Setup scripts will almost never instantiate Distribution directly, - unless the 'setup()' function is totally inadequate to their needs. - However, it is conceivable that a setup script might wish to subclass - Distribution for some specialized purpose, and then pass the subclass - to 'setup()' as the 'distclass' keyword argument. If so, it is - necessary to respect the expectations that 'setup' has of Distribution. - See the code for 'setup()', in core.py, for details. - """ - - # 'global_options' describes the command-line options that may be - # supplied to the setup script prior to any actual commands. - # Eg. "./setup.py -n" or "./setup.py --quiet" both take advantage of - # these global options. This list should be kept to a bare minimum, - # since every global option is also valid as a command option -- and we - # don't want to pollute the commands with too many options that they - # have minimal control over. - # The fourth entry for verbose means that it can be repeated. 
- global_options = [ - ('verbose', 'v', "run verbosely (default)", 1), - ('quiet', 'q', "run quietly (turns verbosity off)"), - ('dry-run', 'n', "don't actually do anything"), - ('help', 'h', "show detailed help message"), - ('no-user-cfg', None, - 'ignore pydistutils.cfg in your home directory'), - ] - - # 'common_usage' is a short (2-3 line) string describing the common - # usage of the setup script. - common_usage = """\ -Common commands: (see '--help-commands' for more) - - setup.py build will build the package underneath 'build/' - setup.py install will install the package -""" - - # options that are not propagated to the commands - display_options = [ - ('help-commands', None, - "list all available commands"), - ('name', None, - "print package name"), - ('version', 'V', - "print package version"), - ('fullname', None, - "print <package name>-<version>"), - ('author', None, - "print the author's name"), - ('author-email', None, - "print the author's email address"), - ('maintainer', None, - "print the maintainer's name"), - ('maintainer-email', None, - "print the maintainer's email address"), - ('contact', None, - "print the maintainer's name if known, else the author's"), - ('contact-email', None, - "print the maintainer's email address if known, else the author's"), - ('url', None, - "print the URL for this package"), - ('license', None, - "print the license of the package"), - ('licence', None, - "alias for --license"), - ('description', None, - "print the package description"), - ('long-description', None, - "print the long package description"), - ('platforms', None, - "print the list of platforms"), - ('classifiers', None, - "print the list of classifiers"), - ('keywords', None, - "print the list of keywords"), - ('provides', None, - "print the list of packages/modules provided"), - ('requires', None, - "print the list of packages/modules required"), - ('obsoletes', None, - "print the list of packages/modules made obsolete") - ] - display_option_names = [translate_longopt(x[0]) for x in display_options] - - # negative options are options that exclude other options - negative_opt = {'quiet': 'verbose'} - - # -- Creation/initialization methods ------------------------------- - - def __init__(self, attrs=None): - """Construct a new Distribution instance: initialize all the - attributes of a Distribution, and then use 'attrs' (a dictionary - mapping attribute names to values) to assign some of those - attributes their "real" values. (Any attributes not mentioned in - 'attrs' will be assigned to some null value: 0, None, an empty list - or dictionary, etc.) Most importantly, initialize the - 'command_obj' attribute to the empty dictionary; this will be - filled in with real command objects by 'parse_command_line()'. - """ - - # Default values for our command-line options - self.verbose = 1 - self.dry_run = 0 - self.help = 0 - for attr in self.display_option_names: - setattr(self, attr, 0) - - # Store the distribution meta-data (name, version, author, and so - # forth) in a separate object -- we're getting to have enough - # information here (and enough command-line options) that it's - # worth it. Also delegate 'get_XXX()' methods to the 'metadata' - # object in a sneaky and underhanded (but efficient!) way. 
- self.metadata = DistributionMetadata() - for basename in self.metadata._METHOD_BASENAMES: - method_name = "get_" + basename - setattr(self, method_name, getattr(self.metadata, method_name)) - - # 'cmdclass' maps command names to class objects, so we - # can 1) quickly figure out which class to instantiate when - # we need to create a new command object, and 2) have a way - # for the setup script to override command classes - self.cmdclass = {} - - # 'command_packages' is a list of packages in which commands - # are searched for. The factory for command 'foo' is expected - # to be named 'foo' in the module 'foo' in one of the packages - # named here. This list is searched from the left; an error - # is raised if no named package provides the command being - # searched for. (Always access using get_command_packages().) - self.command_packages = None - - # 'script_name' and 'script_args' are usually set to sys.argv[0] - # and sys.argv[1:], but they can be overridden when the caller is - # not necessarily a setup script run from the command-line. - self.script_name = None - self.script_args = None - - # 'command_options' is where we store command options between - # parsing them (from config files, the command-line, etc.) and when - # they are actually needed -- ie. when the command in question is - # instantiated. It is a dictionary of dictionaries of 2-tuples: - # command_options = { command_name : { option : (source, value) } } - self.command_options = {} - - # 'dist_files' is the list of (command, pyversion, file) that - # have been created by any dist commands run so far. This is - # filled regardless of whether the run is dry or not. pyversion - # gives sysconfig.get_python_version() if the dist file is - # specific to a Python version, 'any' if it is good for all - # Python versions on the target platform, and '' for a source - # file. pyversion should not be used to specify minimum or - # maximum required Python versions; use the metainfo for that - # instead. - self.dist_files = [] - - # These options are really the business of various commands, rather - # than of the Distribution itself. We provide aliases for them in - # Distribution as a convenience to the developer. - self.packages = None - self.package_data = {} - self.package_dir = None - self.py_modules = None - self.libraries = None - self.headers = None - self.ext_modules = None - self.ext_package = None - self.include_dirs = None - self.extra_path = None - self.scripts = None - self.data_files = None - self.password = '' - - # And now initialize bookkeeping stuff that can't be supplied by - # the caller at all. 'command_obj' maps command names to - # Command instances -- that's how we enforce that every command - # class is a singleton. - self.command_obj = {} - - # 'have_run' maps command names to boolean values; it keeps track - # of whether we have actually run a particular command, to make it - # cheap to "run" a command whenever we think we might need to -- if - # it's already been done, no need for expensive filesystem - # operations, we just check the 'have_run' dictionary and carry on. - # It's only safe to query 'have_run' for a command class that has - # been instantiated -- a false value will be inserted when the - # command object is created, and replaced with a true value when - # the command is successfully run. Thus it's probably best to use - # '.get()' rather than a straight lookup. 
- self.have_run = {} - - # Now we'll use the attrs dictionary (ultimately, keyword args from - # the setup script) to possibly override any or all of these - # distribution options. - - if attrs: - # Pull out the set of command options and work on them - # specifically. Note that this order guarantees that aliased - # command options will override any supplied redundantly - # through the general options dictionary. - options = attrs.get('options') - if options is not None: - del attrs['options'] - for (command, cmd_options) in options.items(): - opt_dict = self.get_option_dict(command) - for (opt, val) in cmd_options.items(): - opt_dict[opt] = ("setup script", val) - - if 'licence' in attrs: - attrs['license'] = attrs['licence'] - del attrs['licence'] - msg = "'licence' distribution option is deprecated; use 'license'" - if warnings is not None: - warnings.warn(msg) - else: - sys.stderr.write(msg + "\n") - - # Now work on the rest of the attributes. Any attribute that's - # not already defined is invalid! - for (key, val) in attrs.items(): - if hasattr(self.metadata, "set_" + key): - getattr(self.metadata, "set_" + key)(val) - elif hasattr(self.metadata, key): - setattr(self.metadata, key, val) - elif hasattr(self, key): - setattr(self, key, val) - else: - msg = "Unknown distribution option: %s" % repr(key) - warnings.warn(msg) - - # no-user-cfg is handled before other command line args - # because other args override the config files, and this - # one is needed before we can load the config files. - # If attrs['script_args'] wasn't passed, assume false. - # - # This also make sure we just look at the global options - self.want_user_cfg = True - - if self.script_args is not None: - for arg in self.script_args: - if not arg.startswith('-'): - break - if arg == '--no-user-cfg': - self.want_user_cfg = False - break - - self.finalize_options() - - def get_option_dict(self, command): - """Get the option dictionary for a given command. If that - command's option dictionary hasn't been created yet, then create it - and return the new dictionary; otherwise, return the existing - option dictionary. - """ - dict = self.command_options.get(command) - if dict is None: - dict = self.command_options[command] = {} - return dict - - def dump_option_dicts(self, header=None, commands=None, indent=""): - from pprint import pformat - - if commands is None: # dump all command option dicts - commands = sorted(self.command_options.keys()) - - if header is not None: - self.announce(indent + header) - indent = indent + " " - - if not commands: - self.announce(indent + "no commands known yet") - return - - for cmd_name in commands: - opt_dict = self.command_options.get(cmd_name) - if opt_dict is None: - self.announce(indent + - "no option dict for '%s' command" % cmd_name) - else: - self.announce(indent + - "option dict for '%s' command:" % cmd_name) - out = pformat(opt_dict) - for line in out.split('\n'): - self.announce(indent + " " + line) - - # -- Config file finding/parsing methods --------------------------- - - def find_config_files(self): - """Find as many configuration files as should be processed for this - platform, and return a list of filenames in the order in which they - should be parsed. The filenames returned are guaranteed to exist - (modulo nasty race conditions). - - There are three possible config files: distutils.cfg in the - Distutils installation directory (ie. 
where the top-level - Distutils __inst__.py file lives), a file in the user's home - directory named .pydistutils.cfg on Unix and pydistutils.cfg - on Windows/Mac; and setup.cfg in the current directory. - - The file in the user's home directory can be disabled with the - --no-user-cfg option. - """ - files = [] - check_environ() - - # Where to look for the system-wide Distutils config file - sys_dir = os.path.dirname(sys.modules['distutils'].__file__) - - # Look for the system config file - sys_file = os.path.join(sys_dir, "distutils.cfg") - if os.path.isfile(sys_file): - files.append(sys_file) - - # What to call the per-user config file - if os.name == 'posix': - user_filename = ".pydistutils.cfg" - else: - user_filename = "pydistutils.cfg" - - # And look for the user config file - if self.want_user_cfg: - user_file = os.path.join(os.path.expanduser('~'), user_filename) - if os.path.isfile(user_file): - files.append(user_file) - - # All platforms support local setup.cfg - local_file = "setup.cfg" - if os.path.isfile(local_file): - files.append(local_file) - - if DEBUG: - self.announce("using config files: %s" % ', '.join(files)) - - return files - - def parse_config_files(self, filenames=None): - from configparser import ConfigParser - - # Ignore install directory options if we have a venv - if sys.prefix != sys.base_prefix: - ignore_options = [ - 'install-base', 'install-platbase', 'install-lib', - 'install-platlib', 'install-purelib', 'install-headers', - 'install-scripts', 'install-data', 'prefix', 'exec-prefix', - 'home', 'user', 'root'] - else: - ignore_options = [] - - ignore_options = frozenset(ignore_options) - - if filenames is None: - filenames = self.find_config_files() - - if DEBUG: - self.announce("Distribution.parse_config_files():") - - parser = ConfigParser() - for filename in filenames: - if DEBUG: - self.announce(" reading %s" % filename) - parser.read(filename) - for section in parser.sections(): - options = parser.options(section) - opt_dict = self.get_option_dict(section) - - for opt in options: - if opt != '__name__' and opt not in ignore_options: - val = parser.get(section,opt) - opt = opt.replace('-', '_') - opt_dict[opt] = (filename, val) - - # Make the ConfigParser forget everything (so we retain - # the original filenames that options come from) - parser.__init__() - - # If there was a "global" section in the config file, use it - # to set Distribution options. - - if 'global' in self.command_options: - for (opt, (src, val)) in self.command_options['global'].items(): - alias = self.negative_opt.get(opt) - try: - if alias: - setattr(self, alias, not strtobool(val)) - elif opt in ('verbose', 'dry_run'): # ugh! - setattr(self, opt, strtobool(val)) - else: - setattr(self, opt, val) - except ValueError as msg: - raise DistutilsOptionError(msg) - - # -- Command-line parsing methods ---------------------------------- - - def parse_command_line(self): - """Parse the setup script's command line, taken from the - 'script_args' instance attribute (which defaults to 'sys.argv[1:]' - -- see 'setup()' in core.py). This list is first processed for - "global options" -- options that set attributes of the Distribution - instance. Then, it is alternately scanned for Distutils commands - and options for that command. Each new command terminates the - options for the previous command. The allowed options for a - command are determined by the 'user_options' attribute of the - command class -- thus, we have to be able to load command classes - in order to parse the command line. 
Any error in that 'options' - attribute raises DistutilsGetoptError; any error on the - command-line raises DistutilsArgError. If no Distutils commands - were found on the command line, raises DistutilsArgError. Return - true if command-line was successfully parsed and we should carry - on with executing commands; false if no errors but we shouldn't - execute commands (currently, this only happens if user asks for - help). - """ - # - # We now have enough information to show the Macintosh dialog - # that allows the user to interactively specify the "command line". - # - toplevel_options = self._get_toplevel_options() - - # We have to parse the command line a bit at a time -- global - # options, then the first command, then its options, and so on -- - # because each command will be handled by a different class, and - # the options that are valid for a particular class aren't known - # until we have loaded the command class, which doesn't happen - # until we know what the command is. - - self.commands = [] - parser = FancyGetopt(toplevel_options + self.display_options) - parser.set_negative_aliases(self.negative_opt) - parser.set_aliases({'licence': 'license'}) - args = parser.getopt(args=self.script_args, object=self) - option_order = parser.get_option_order() - log.set_verbosity(self.verbose) - - # for display options we return immediately - if self.handle_display_options(option_order): - return - while args: - args = self._parse_command_opts(parser, args) - if args is None: # user asked for help (and got it) - return - - # Handle the cases of --help as a "global" option, ie. - # "setup.py --help" and "setup.py --help command ...". For the - # former, we show global options (--verbose, --dry-run, etc.) - # and display-only options (--name, --version, etc.); for the - # latter, we omit the display-only options and show help for - # each command listed on the command line. - if self.help: - self._show_help(parser, - display_options=len(self.commands) == 0, - commands=self.commands) - return - - # Oops, no commands found -- an end-user error - if not self.commands: - raise DistutilsArgError("no commands supplied") - - # All is well: return true - return True - - def _get_toplevel_options(self): - """Return the non-display options recognized at the top level. - - This includes options that are recognized *only* at the top - level as well as options recognized for commands. - """ - return self.global_options + [ - ("command-packages=", None, - "list of packages that provide distutils commands"), - ] - - def _parse_command_opts(self, parser, args): - """Parse the command-line options for a single command. - 'parser' must be a FancyGetopt instance; 'args' must be the list - of arguments, starting with the current command (whose options - we are about to parse). Returns a new version of 'args' with - the next command at the front of the list; will be the empty - list if there are no more commands on the command line. Returns - None if the user asked for help on this command. - """ - # late import because of mutual dependence between these modules - from distutils.cmd import Command - - # Pull the current command from the head of the command line - command = args[0] - if not command_re.match(command): - raise SystemExit("invalid command name '%s'" % command) - self.commands.append(command) - - # Dig up the command class that implements this command, so we - # 1) know that it's a valid command, and 2) know which options - # it takes. 
- try: - cmd_class = self.get_command_class(command) - except DistutilsModuleError as msg: - raise DistutilsArgError(msg) - - # Require that the command class be derived from Command -- want - # to be sure that the basic "command" interface is implemented. - if not issubclass(cmd_class, Command): - raise DistutilsClassError( - "command class %s must subclass Command" % cmd_class) - - # Also make sure that the command object provides a list of its - # known options. - if not (hasattr(cmd_class, 'user_options') and - isinstance(cmd_class.user_options, list)): - msg = ("command class %s must provide " - "'user_options' attribute (a list of tuples)") - raise DistutilsClassError(msg % cmd_class) - - # If the command class has a list of negative alias options, - # merge it in with the global negative aliases. - negative_opt = self.negative_opt - if hasattr(cmd_class, 'negative_opt'): - negative_opt = negative_opt.copy() - negative_opt.update(cmd_class.negative_opt) - - # Check for help_options in command class. They have a different - # format (tuple of four) so we need to preprocess them here. - if (hasattr(cmd_class, 'help_options') and - isinstance(cmd_class.help_options, list)): - help_options = fix_help_options(cmd_class.help_options) - else: - help_options = [] - - # All commands support the global options too, just by adding - # in 'global_options'. - parser.set_option_table(self.global_options + - cmd_class.user_options + - help_options) - parser.set_negative_aliases(negative_opt) - (args, opts) = parser.getopt(args[1:]) - if hasattr(opts, 'help') and opts.help: - self._show_help(parser, display_options=0, commands=[cmd_class]) - return - - if (hasattr(cmd_class, 'help_options') and - isinstance(cmd_class.help_options, list)): - help_option_found=0 - for (help_option, short, desc, func) in cmd_class.help_options: - if hasattr(opts, parser.get_attr_name(help_option)): - help_option_found=1 - if callable(func): - func() - else: - raise DistutilsClassError( - "invalid help function %r for help option '%s': " - "must be a callable object (function, etc.)" - % (func, help_option)) - - if help_option_found: - return - - # Put the options from the command-line into their official - # holding pen, the 'command_options' dictionary. - opt_dict = self.get_option_dict(command) - for (name, value) in vars(opts).items(): - opt_dict[name] = ("command line", value) - - return args - - def finalize_options(self): - """Set final values for all the options on the Distribution - instance, analogous to the .finalize_options() method of Command - objects. - """ - for attr in ('keywords', 'platforms'): - value = getattr(self.metadata, attr) - if value is None: - continue - if isinstance(value, str): - value = [elm.strip() for elm in value.split(',')] - setattr(self.metadata, attr, value) - - def _show_help(self, parser, global_options=1, display_options=1, - commands=[]): - """Show help for the setup script command-line in the form of - several lists of command-line options. 'parser' should be a - FancyGetopt instance; do not expect it to be returned in the - same state, as its option table will be reset to make it - generate the correct help text. - - If 'global_options' is true, lists the global options: - --verbose, --dry-run, etc. If 'display_options' is true, lists - the "display-only" options: --name, --version, etc. Finally, - lists per-command help for every command name or command class - in 'commands'. 
- """ - # late import because of mutual dependence between these modules - from distutils.core import gen_usage - from distutils.cmd import Command - - if global_options: - if display_options: - options = self._get_toplevel_options() - else: - options = self.global_options - parser.set_option_table(options) - parser.print_help(self.common_usage + "\nGlobal options:") - print('') - - if display_options: - parser.set_option_table(self.display_options) - parser.print_help( - "Information display options (just display " + - "information, ignore any commands)") - print('') - - for command in self.commands: - if isinstance(command, type) and issubclass(command, Command): - klass = command - else: - klass = self.get_command_class(command) - if (hasattr(klass, 'help_options') and - isinstance(klass.help_options, list)): - parser.set_option_table(klass.user_options + - fix_help_options(klass.help_options)) - else: - parser.set_option_table(klass.user_options) - parser.print_help("Options for '%s' command:" % klass.__name__) - print('') - - print(gen_usage(self.script_name)) - - def handle_display_options(self, option_order): - """If there were any non-global "display-only" options - (--help-commands or the metadata display options) on the command - line, display the requested info and return true; else return - false. - """ - from distutils.core import gen_usage - - # User just wants a list of commands -- we'll print it out and stop - # processing now (ie. if they ran "setup --help-commands foo bar", - # we ignore "foo bar"). - if self.help_commands: - self.print_commands() - print('') - print(gen_usage(self.script_name)) - return 1 - - # If user supplied any of the "display metadata" options, then - # display that metadata in the order in which the user supplied the - # metadata options. - any_display_options = 0 - is_display_option = {} - for option in self.display_options: - is_display_option[option[0]] = 1 - - for (opt, val) in option_order: - if val and is_display_option.get(opt): - opt = translate_longopt(opt) - value = getattr(self.metadata, "get_"+opt)() - if opt in ['keywords', 'platforms']: - print(','.join(value)) - elif opt in ('classifiers', 'provides', 'requires', - 'obsoletes'): - print('\n'.join(value)) - else: - print(value) - any_display_options = 1 - - return any_display_options - - def print_command_list(self, commands, header, max_length): - """Print a subset of the list of all commands -- used by - 'print_commands()'. - """ - print(header + ":") - - for cmd in commands: - klass = self.cmdclass.get(cmd) - if not klass: - klass = self.get_command_class(cmd) - try: - description = klass.description - except AttributeError: - description = "(no description available)" - - print(" %-*s %s" % (max_length, cmd, description)) - - def print_commands(self): - """Print out a help message listing all available commands with a - description of each. The list is divided into "standard commands" - (listed in distutils.command.__all__) and "extra commands" - (mentioned in self.cmdclass, but not a standard command). The - descriptions come from the command class attribute - 'description'. 
- """ - import distutils.command - std_commands = distutils.command.__all__ - is_std = {} - for cmd in std_commands: - is_std[cmd] = 1 - - extra_commands = [] - for cmd in self.cmdclass.keys(): - if not is_std.get(cmd): - extra_commands.append(cmd) - - max_length = 0 - for cmd in (std_commands + extra_commands): - if len(cmd) > max_length: - max_length = len(cmd) - - self.print_command_list(std_commands, - "Standard commands", - max_length) - if extra_commands: - print() - self.print_command_list(extra_commands, - "Extra commands", - max_length) - - def get_command_list(self): - """Get a list of (command, description) tuples. - The list is divided into "standard commands" (listed in - distutils.command.__all__) and "extra commands" (mentioned in - self.cmdclass, but not a standard command). The descriptions come - from the command class attribute 'description'. - """ - # Currently this is only used on Mac OS, for the Mac-only GUI - # Distutils interface (by Jack Jansen) - import distutils.command - std_commands = distutils.command.__all__ - is_std = {} - for cmd in std_commands: - is_std[cmd] = 1 - - extra_commands = [] - for cmd in self.cmdclass.keys(): - if not is_std.get(cmd): - extra_commands.append(cmd) - - rv = [] - for cmd in (std_commands + extra_commands): - klass = self.cmdclass.get(cmd) - if not klass: - klass = self.get_command_class(cmd) - try: - description = klass.description - except AttributeError: - description = "(no description available)" - rv.append((cmd, description)) - return rv - - # -- Command class/object methods ---------------------------------- - - def get_command_packages(self): - """Return a list of packages from which commands are loaded.""" - pkgs = self.command_packages - if not isinstance(pkgs, list): - if pkgs is None: - pkgs = '' - pkgs = [pkg.strip() for pkg in pkgs.split(',') if pkg != ''] - if "distutils.command" not in pkgs: - pkgs.insert(0, "distutils.command") - self.command_packages = pkgs - return pkgs - - def get_command_class(self, command): - """Return the class that implements the Distutils command named by - 'command'. First we check the 'cmdclass' dictionary; if the - command is mentioned there, we fetch the class object from the - dictionary and return it. Otherwise we load the command module - ("distutils.command." + command) and fetch the command class from - the module. The loaded class is also stored in 'cmdclass' - to speed future calls to 'get_command_class()'. - - Raises DistutilsModuleError if the expected module could not be - found, or if that module does not define the expected class. - """ - klass = self.cmdclass.get(command) - if klass: - return klass - - for pkgname in self.get_command_packages(): - module_name = "%s.%s" % (pkgname, command) - klass_name = command - - try: - __import__(module_name) - module = sys.modules[module_name] - except ImportError: - continue - - try: - klass = getattr(module, klass_name) - except AttributeError: - raise DistutilsModuleError( - "invalid command '%s' (no class '%s' in module '%s')" - % (command, klass_name, module_name)) - - self.cmdclass[command] = klass - return klass - - raise DistutilsModuleError("invalid command '%s'" % command) - - def get_command_obj(self, command, create=1): - """Return the command object for 'command'. Normally this object - is cached on a previous call to 'get_command_obj()'; if no command - object for 'command' is in the cache, then we either create and - return it (if 'create' is true) or return None. 
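get_command_class() above resolves a command name by first consulting the cmdclass cache, then importing '<package>.<command>' from each configured command package and pulling out the class with the same name as the command. The standalone sketch below mirrors that lookup pattern without depending on the removed module; the function, parameter, and package names are my own, not distutils API:

import importlib

def resolve_command_class(command, cache, packages):
    """Resolve 'command' via a cache, then by importing '<pkg>.<command>' and
    fetching the class named after the command (cf. get_command_class above)."""
    klass = cache.get(command)
    if klass:
        return klass
    for pkgname in packages:
        module_name = f"{pkgname}.{command}"
        try:
            module = importlib.import_module(module_name)
        except ImportError:
            continue  # not provided by this package; try the next one
        try:
            klass = getattr(module, command)
        except AttributeError:
            raise LookupError(f"no class {command!r} in module {module_name!r}")
        cache[command] = klass  # memoize so later lookups are cheap
        return klass
    raise LookupError(f"invalid command {command!r}")

# e.g. resolve_command_class("build_py", {}, ["mytool.commands"])  # hypothetical package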
- """ - cmd_obj = self.command_obj.get(command) - if not cmd_obj and create: - if DEBUG: - self.announce("Distribution.get_command_obj(): " - "creating '%s' command object" % command) - - klass = self.get_command_class(command) - cmd_obj = self.command_obj[command] = klass(self) - self.have_run[command] = 0 - - # Set any options that were supplied in config files - # or on the command line. (NB. support for error - # reporting is lame here: any errors aren't reported - # until 'finalize_options()' is called, which means - # we won't report the source of the error.) - options = self.command_options.get(command) - if options: - self._set_command_options(cmd_obj, options) - - return cmd_obj - - def _set_command_options(self, command_obj, option_dict=None): - """Set the options for 'command_obj' from 'option_dict'. Basically - this means copying elements of a dictionary ('option_dict') to - attributes of an instance ('command'). - - 'command_obj' must be a Command instance. If 'option_dict' is not - supplied, uses the standard option dictionary for this command - (from 'self.command_options'). - """ - command_name = command_obj.get_command_name() - if option_dict is None: - option_dict = self.get_option_dict(command_name) - - if DEBUG: - self.announce(" setting options for '%s' command:" % command_name) - for (option, (source, value)) in option_dict.items(): - if DEBUG: - self.announce(" %s = %s (from %s)" % (option, value, - source)) - try: - bool_opts = [translate_longopt(o) - for o in command_obj.boolean_options] - except AttributeError: - bool_opts = [] - try: - neg_opt = command_obj.negative_opt - except AttributeError: - neg_opt = {} - - try: - is_string = isinstance(value, str) - if option in neg_opt and is_string: - setattr(command_obj, neg_opt[option], not strtobool(value)) - elif option in bool_opts and is_string: - setattr(command_obj, option, strtobool(value)) - elif hasattr(command_obj, option): - setattr(command_obj, option, value) - else: - raise DistutilsOptionError( - "error in %s: command '%s' has no such option '%s'" - % (source, command_name, option)) - except ValueError as msg: - raise DistutilsOptionError(msg) - - def reinitialize_command(self, command, reinit_subcommands=0): - """Reinitializes a command to the state it was in when first - returned by 'get_command_obj()': ie., initialized but not yet - finalized. This provides the opportunity to sneak option - values in programmatically, overriding or supplementing - user-supplied values from the config files and command line. - You'll have to re-finalize the command object (by calling - 'finalize_options()' or 'ensure_finalized()') before using it for - real. - - 'command' should be a command name (string) or command object. If - 'reinit_subcommands' is true, also reinitializes the command's - sub-commands, as declared by the 'sub_commands' class attribute (if - it has one). See the "install" command for an example. Only - reinitializes the sub-commands that actually matter, ie. those - whose test predicates return true. - - Returns the reinitialized command object. 
- """ - from distutils.cmd import Command - if not isinstance(command, Command): - command_name = command - command = self.get_command_obj(command_name) - else: - command_name = command.get_command_name() - - if not command.finalized: - return command - command.initialize_options() - command.finalized = 0 - self.have_run[command_name] = 0 - self._set_command_options(command) - - if reinit_subcommands: - for sub in command.get_sub_commands(): - self.reinitialize_command(sub, reinit_subcommands) - - return command - - # -- Methods that operate on the Distribution ---------------------- - - def announce(self, msg, level=log.INFO): - log.log(level, msg) - - def run_commands(self): - """Run each command that was seen on the setup script command line. - Uses the list of commands found and cache of command objects - created by 'get_command_obj()'. - """ - for cmd in self.commands: - self.run_command(cmd) - - # -- Methods that operate on its Commands -------------------------- - - def run_command(self, command): - """Do whatever it takes to run a command (including nothing at all, - if the command has already been run). Specifically: if we have - already created and run the command named by 'command', return - silently without doing anything. If the command named by 'command' - doesn't even have a command object yet, create one. Then invoke - 'run()' on that command object (or an existing one). - """ - # Already been here, done that? then return silently. - if self.have_run.get(command): - return - - log.info("running %s", command) - cmd_obj = self.get_command_obj(command) - cmd_obj.ensure_finalized() - cmd_obj.run() - self.have_run[command] = 1 - - # -- Distribution query methods ------------------------------------ - - def has_pure_modules(self): - return len(self.packages or self.py_modules or []) > 0 - - def has_ext_modules(self): - return self.ext_modules and len(self.ext_modules) > 0 - - def has_c_libraries(self): - return self.libraries and len(self.libraries) > 0 - - def has_modules(self): - return self.has_pure_modules() or self.has_ext_modules() - - def has_headers(self): - return self.headers and len(self.headers) > 0 - - def has_scripts(self): - return self.scripts and len(self.scripts) > 0 - - def has_data_files(self): - return self.data_files and len(self.data_files) > 0 - - def is_pure(self): - return (self.has_pure_modules() and - not self.has_ext_modules() and - not self.has_c_libraries()) - - # -- Metadata query methods ---------------------------------------- - - # If you're looking for 'get_name()', 'get_version()', and so forth, - # they are defined in a sneaky way: the constructor binds self.get_XXX - # to self.metadata.get_XXX. The actual code is in the - # DistributionMetadata class, below. - -class DistributionMetadata: - """Dummy class to hold the distribution meta-data: name, version, - author, and so forth. 
- """ - - _METHOD_BASENAMES = ("name", "version", "author", "author_email", - "maintainer", "maintainer_email", "url", - "license", "description", "long_description", - "keywords", "platforms", "fullname", "contact", - "contact_email", "classifiers", "download_url", - # PEP 314 - "provides", "requires", "obsoletes", - ) - - def __init__(self, path=None): - if path is not None: - self.read_pkg_file(open(path)) - else: - self.name = None - self.version = None - self.author = None - self.author_email = None - self.maintainer = None - self.maintainer_email = None - self.url = None - self.license = None - self.description = None - self.long_description = None - self.keywords = None - self.platforms = None - self.classifiers = None - self.download_url = None - # PEP 314 - self.provides = None - self.requires = None - self.obsoletes = None - - def read_pkg_file(self, file): - """Reads the metadata values from a file object.""" - msg = message_from_file(file) - - def _read_field(name): - value = msg[name] - if value == 'UNKNOWN': - return None - return value - - def _read_list(name): - values = msg.get_all(name, None) - if values == []: - return None - return values - - metadata_version = msg['metadata-version'] - self.name = _read_field('name') - self.version = _read_field('version') - self.description = _read_field('summary') - # we are filling author only. - self.author = _read_field('author') - self.maintainer = None - self.author_email = _read_field('author-email') - self.maintainer_email = None - self.url = _read_field('home-page') - self.license = _read_field('license') - - if 'download-url' in msg: - self.download_url = _read_field('download-url') - else: - self.download_url = None - - self.long_description = _read_field('description') - self.description = _read_field('summary') - - if 'keywords' in msg: - self.keywords = _read_field('keywords').split(',') - - self.platforms = _read_list('platform') - self.classifiers = _read_list('classifier') - - # PEP 314 - these fields only exist in 1.1 - if metadata_version == '1.1': - self.requires = _read_list('requires') - self.provides = _read_list('provides') - self.obsoletes = _read_list('obsoletes') - else: - self.requires = None - self.provides = None - self.obsoletes = None - - def write_pkg_info(self, base_dir): - """Write the PKG-INFO file into the release tree. - """ - with open(os.path.join(base_dir, 'PKG-INFO'), 'w', - encoding='UTF-8') as pkg_info: - self.write_pkg_file(pkg_info) - - def write_pkg_file(self, file): - """Write the PKG-INFO format data to a file object. 
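read_pkg_file() above leans on the stdlib email parser because PKG-INFO uses RFC 822-style headers, and write_pkg_file() below emits the same format. A tiny, self-contained illustration of that parsing approach, using an invented PKG-INFO payload:

from email import message_from_string

# A minimal, made-up PKG-INFO body in the 1.1 format produced by write_pkg_file().
PKG_INFO = """\
Metadata-Version: 1.1
Name: example
Version: 1.0
Summary: An example distribution
Author: A. Maintainer
Classifier: Programming Language :: Python :: 3
Classifier: License :: OSI Approved :: MIT License
"""

msg = message_from_string(PKG_INFO)
print(msg["name"], msg["version"])   # single-valued fields, case-insensitive lookup
print(msg.get_all("classifier"))     # repeated fields come back as a list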
- """ - version = '1.0' - if (self.provides or self.requires or self.obsoletes or - self.classifiers or self.download_url): - version = '1.1' - - file.write('Metadata-Version: %s\n' % version) - file.write('Name: %s\n' % self.get_name()) - file.write('Version: %s\n' % self.get_version()) - file.write('Summary: %s\n' % self.get_description()) - file.write('Home-page: %s\n' % self.get_url()) - file.write('Author: %s\n' % self.get_contact()) - file.write('Author-email: %s\n' % self.get_contact_email()) - file.write('License: %s\n' % self.get_license()) - if self.download_url: - file.write('Download-URL: %s\n' % self.download_url) - - long_desc = rfc822_escape(self.get_long_description()) - file.write('Description: %s\n' % long_desc) - - keywords = ','.join(self.get_keywords()) - if keywords: - file.write('Keywords: %s\n' % keywords) - - self._write_list(file, 'Platform', self.get_platforms()) - self._write_list(file, 'Classifier', self.get_classifiers()) - - # PEP 314 - self._write_list(file, 'Requires', self.get_requires()) - self._write_list(file, 'Provides', self.get_provides()) - self._write_list(file, 'Obsoletes', self.get_obsoletes()) - - def _write_list(self, file, name, values): - for value in values: - file.write('%s: %s\n' % (name, value)) - - # -- Metadata query methods ---------------------------------------- - - def get_name(self): - return self.name or "UNKNOWN" - - def get_version(self): - return self.version or "0.0.0" - - def get_fullname(self): - return "%s-%s" % (self.get_name(), self.get_version()) - - def get_author(self): - return self.author or "UNKNOWN" - - def get_author_email(self): - return self.author_email or "UNKNOWN" - - def get_maintainer(self): - return self.maintainer or "UNKNOWN" - - def get_maintainer_email(self): - return self.maintainer_email or "UNKNOWN" - - def get_contact(self): - return self.maintainer or self.author or "UNKNOWN" - - def get_contact_email(self): - return self.maintainer_email or self.author_email or "UNKNOWN" - - def get_url(self): - return self.url or "UNKNOWN" - - def get_license(self): - return self.license or "UNKNOWN" - get_licence = get_license - - def get_description(self): - return self.description or "UNKNOWN" - - def get_long_description(self): - return self.long_description or "UNKNOWN" - - def get_keywords(self): - return self.keywords or [] - - def set_keywords(self, value): - self.keywords = _ensure_list(value, 'keywords') - - def get_platforms(self): - return self.platforms or ["UNKNOWN"] - - def set_platforms(self, value): - self.platforms = _ensure_list(value, 'platforms') - - def get_classifiers(self): - return self.classifiers or [] - - def set_classifiers(self, value): - self.classifiers = _ensure_list(value, 'classifiers') - - def get_download_url(self): - return self.download_url or "UNKNOWN" - - # PEP 314 - def get_requires(self): - return self.requires or [] - - def set_requires(self, value): - import distutils.versionpredicate - for v in value: - distutils.versionpredicate.VersionPredicate(v) - self.requires = list(value) - - def get_provides(self): - return self.provides or [] - - def set_provides(self, value): - value = [v.strip() for v in value] - for v in value: - import distutils.versionpredicate - distutils.versionpredicate.split_provision(v) - self.provides = value - - def get_obsoletes(self): - return self.obsoletes or [] - - def set_obsoletes(self, value): - import distutils.versionpredicate - for v in value: - distutils.versionpredicate.VersionPredicate(v) - self.obsoletes = list(value) - -def 
fix_help_options(options): - """Convert a 4-tuple 'help_options' list as found in various command - classes to the 3-tuple form required by FancyGetopt. - """ - new_options = [] - for help_tuple in options: - new_options.append(help_tuple[0:3]) - return new_options diff --git a/Lib/distutils/errors.py b/Lib/distutils/errors.py deleted file mode 100644 index 8b93059e19faa9..00000000000000 --- a/Lib/distutils/errors.py +++ /dev/null @@ -1,97 +0,0 @@ -"""distutils.errors - -Provides exceptions used by the Distutils modules. Note that Distutils -modules may raise standard exceptions; in particular, SystemExit is -usually raised for errors that are obviously the end-user's fault -(eg. bad command-line arguments). - -This module is safe to use in "from ... import *" mode; it only exports -symbols whose names start with "Distutils" and end with "Error".""" - -class DistutilsError (Exception): - """The root of all Distutils evil.""" - pass - -class DistutilsModuleError (DistutilsError): - """Unable to load an expected module, or to find an expected class - within some module (in particular, command modules and classes).""" - pass - -class DistutilsClassError (DistutilsError): - """Some command class (or possibly distribution class, if anyone - feels a need to subclass Distribution) is found not to be holding - up its end of the bargain, ie. implementing some part of the - "command "interface.""" - pass - -class DistutilsGetoptError (DistutilsError): - """The option table provided to 'fancy_getopt()' is bogus.""" - pass - -class DistutilsArgError (DistutilsError): - """Raised by fancy_getopt in response to getopt.error -- ie. an - error in the command line usage.""" - pass - -class DistutilsFileError (DistutilsError): - """Any problems in the filesystem: expected file not found, etc. - Typically this is for problems that we detect before OSError - could be raised.""" - pass - -class DistutilsOptionError (DistutilsError): - """Syntactic/semantic errors in command options, such as use of - mutually conflicting options, or inconsistent options, - badly-spelled values, etc. No distinction is made between option - values originating in the setup script, the command line, config - files, or what-have-you -- but if we *know* something originated in - the setup script, we'll raise DistutilsSetupError instead.""" - pass - -class DistutilsSetupError (DistutilsError): - """For errors that can be definitely blamed on the setup script, - such as invalid keyword arguments to 'setup()'.""" - pass - -class DistutilsPlatformError (DistutilsError): - """We don't know how to do something on the current platform (but - we do know how to do it on some platform) -- eg. 
trying to compile - C files on a platform not supported by a CCompiler subclass.""" - pass - -class DistutilsExecError (DistutilsError): - """Any problems executing an external program (such as the C - compiler, when compiling C files).""" - pass - -class DistutilsInternalError (DistutilsError): - """Internal inconsistencies or impossibilities (obviously, this - should never be seen if the code is working!).""" - pass - -class DistutilsTemplateError (DistutilsError): - """Syntax error in a file list template.""" - -class DistutilsByteCompileError(DistutilsError): - """Byte compile error.""" - -# Exception classes used by the CCompiler implementation classes -class CCompilerError (Exception): - """Some compile/link operation failed.""" - -class PreprocessError (CCompilerError): - """Failure to preprocess one or more C/C++ files.""" - -class CompileError (CCompilerError): - """Failure to compile one or more C/C++ source files.""" - -class LibError (CCompilerError): - """Failure to create a static library from one or more C/C++ object - files.""" - -class LinkError (CCompilerError): - """Failure to link one or more C/C++ object files into an executable - or shared library file.""" - -class UnknownFileError (CCompilerError): - """Attempt to process an unknown file type.""" diff --git a/Lib/distutils/extension.py b/Lib/distutils/extension.py deleted file mode 100644 index e85032ece8916f..00000000000000 --- a/Lib/distutils/extension.py +++ /dev/null @@ -1,241 +0,0 @@ -"""distutils.extension - -Provides the Extension class, used to describe C/C++ extension -modules in setup scripts.""" - -import os -import re -import warnings - -# This class is really only used by the "build_ext" command, so it might -# make sense to put it in distutils.command.build_ext. However, that -# module is already big enough, and I want to make this class a bit more -# complex to simplify some common cases ("foo" module in "foo.c") and do -# better error-checking ("foo.c" actually exists). -# -# Also, putting this in build_ext.py means every setup script would have to -# import that large-ish module (indirectly, through distutils.core) in -# order to do anything. - -class Extension: - """Just a collection of attributes that describes an extension - module and everything needed to build it (hopefully in a portable - way, but there are hooks that let you be as unportable as you need). - - Instance attributes: - name : string - the full name of the extension, including any packages -- ie. - *not* a filename or pathname, but Python dotted name - sources : [string] - list of source filenames, relative to the distribution root - (where the setup script lives), in Unix form (slash-separated) - for portability. Source files may be C, C++, SWIG (.i), - platform-specific resource files, or whatever else is recognized - by the "build_ext" command as source for a Python extension. 
- include_dirs : [string] - list of directories to search for C/C++ header files (in Unix - form for portability) - define_macros : [(name : string, value : string|None)] - list of macros to define; each macro is defined using a 2-tuple, - where 'value' is either the string to define it to or None to - define it without a particular value (equivalent of "#define - FOO" in source or -DFOO on Unix C compiler command line) - undef_macros : [string] - list of macros to undefine explicitly - library_dirs : [string] - list of directories to search for C/C++ libraries at link time - libraries : [string] - list of library names (not filenames or paths) to link against - runtime_library_dirs : [string] - list of directories to search for C/C++ libraries at run time - (for shared extensions, this is when the extension is loaded) - extra_objects : [string] - list of extra files to link with (eg. object files not implied - by 'sources', static library that must be explicitly specified, - binary resource files, etc.) - extra_compile_args : [string] - any extra platform- and compiler-specific information to use - when compiling the source files in 'sources'. For platforms and - compilers where "command line" makes sense, this is typically a - list of command-line arguments, but for other platforms it could - be anything. - extra_link_args : [string] - any extra platform- and compiler-specific information to use - when linking object files together to create the extension (or - to create a new static Python interpreter). Similar - interpretation as for 'extra_compile_args'. - export_symbols : [string] - list of symbols to be exported from a shared extension. Not - used on all platforms, and not generally necessary for Python - extensions, which typically export exactly one symbol: "init" + - extension_name. - swig_opts : [string] - any extra options to pass to SWIG if a source file has the .i - extension. - depends : [string] - list of files that the extension depends on - language : string - extension language (i.e. "c", "c++", "objc"). Will be detected - from the source extensions if not provided. - optional : boolean - specifies that a build failure in the extension should not abort the - build process, but simply not install the failing extension. - """ - - # When adding arguments to this constructor, be sure to update - # setup_keywords in core.py. 
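The attribute list above documents the Extension constructor's keyword arguments. setuptools still ships an Extension class with an equivalent constructor, so (assuming setuptools is installed) the documented attributes map onto a call like the following; every module name and path here is invented for illustration:

from setuptools import Extension

ext = Extension(
    "example._speedups",                  # dotted module name, not a file name
    sources=["src/_speedups.c"],
    include_dirs=["src/include"],
    define_macros=[("NDEBUG", None), ("VERSION", '"1.0"')],
    libraries=["m"],
    extra_compile_args=["-O3"],
    optional=True,                        # a build failure skips this extension
)
print(ext.name, ext.sources)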
- def __init__(self, name, sources, - include_dirs=None, - define_macros=None, - undef_macros=None, - library_dirs=None, - libraries=None, - runtime_library_dirs=None, - extra_objects=None, - extra_compile_args=None, - extra_link_args=None, - export_symbols=None, - swig_opts = None, - depends=None, - language=None, - optional=None, - **kw # To catch unknown keywords - ): - if not isinstance(name, str): - raise AssertionError("'name' must be a string") - if not (isinstance(sources, list) and - all(isinstance(v, str) for v in sources)): - raise AssertionError("'sources' must be a list of strings") - - self.name = name - self.sources = sources - self.include_dirs = include_dirs or [] - self.define_macros = define_macros or [] - self.undef_macros = undef_macros or [] - self.library_dirs = library_dirs or [] - self.libraries = libraries or [] - self.runtime_library_dirs = runtime_library_dirs or [] - self.extra_objects = extra_objects or [] - self.extra_compile_args = extra_compile_args or [] - self.extra_link_args = extra_link_args or [] - self.export_symbols = export_symbols or [] - self.swig_opts = swig_opts or [] - self.depends = depends or [] - self.language = language - self.optional = optional - - # If there are unknown keyword options, warn about them - if len(kw) > 0: - options = [repr(option) for option in kw] - options = ', '.join(sorted(options)) - msg = "Unknown Extension options: %s" % options - warnings.warn(msg) - - def __repr__(self): - return '<%s.%s(%r) at %#x>' % ( - self.__class__.__module__, - self.__class__.__qualname__, - self.name, - id(self)) - - -def read_setup_file(filename): - """Reads a Setup file and returns Extension instances.""" - from distutils.sysconfig import (parse_makefile, expand_makefile_vars, - _variable_rx) - - from distutils.text_file import TextFile - from distutils.util import split_quoted - - # First pass over the file to gather "VAR = VALUE" assignments. - vars = parse_makefile(filename) - - # Second pass to gobble up the real content: lines of the form - # <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...] - file = TextFile(filename, - strip_comments=1, skip_blanks=1, join_lines=1, - lstrip_ws=1, rstrip_ws=1) - try: - extensions = [] - - while True: - line = file.readline() - if line is None: # eof - break - if re.match(_variable_rx, line): # VAR=VALUE, handled in first pass - continue - - if line[0] == line[-1] == "*": - file.warn("'%s' lines not handled yet" % line) - continue - - line = expand_makefile_vars(line, vars) - words = split_quoted(line) - - # NB. this parses a slightly different syntax than the old - # makesetup script: here, there must be exactly one extension per - # line, and it must be the first word of the line. I have no idea - # why the old syntax supported multiple extensions per line, as - # they all wind up being the same. - - module = words[0] - ext = Extension(module, []) - append_next_word = None - - for word in words[1:]: - if append_next_word is not None: - append_next_word.append(word) - append_next_word = None - continue - - suffix = os.path.splitext(word)[1] - switch = word[0:2] ; value = word[2:] - - if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"): - # hmm, should we do something about C vs. C++ sources? - # or leave it up to the CCompiler implementation to - # worry about? 
- ext.sources.append(word) - elif switch == "-I": - ext.include_dirs.append(value) - elif switch == "-D": - equals = value.find("=") - if equals == -1: # bare "-DFOO" -- no value - ext.define_macros.append((value, None)) - else: # "-DFOO=blah" - ext.define_macros.append((value[0:equals], - value[equals+2:])) - elif switch == "-U": - ext.undef_macros.append(value) - elif switch == "-C": # only here 'cause makesetup has it! - ext.extra_compile_args.append(word) - elif switch == "-l": - ext.libraries.append(value) - elif switch == "-L": - ext.library_dirs.append(value) - elif switch == "-R": - ext.runtime_library_dirs.append(value) - elif word == "-rpath": - append_next_word = ext.runtime_library_dirs - elif word == "-Xlinker": - append_next_word = ext.extra_link_args - elif word == "-Xcompiler": - append_next_word = ext.extra_compile_args - elif switch == "-u": - ext.extra_link_args.append(word) - if not value: - append_next_word = ext.extra_link_args - elif suffix in (".a", ".so", ".sl", ".o", ".dylib"): - # NB. a really faithful emulation of makesetup would - # append a .o file to extra_objects only if it - # had a slash in it; otherwise, it would s/.o/.c/ - # and append it to sources. Hmmmm. - ext.extra_objects.append(word) - else: - file.warn("unrecognized argument '%s'" % word) - - extensions.append(ext) - finally: - file.close() - - return extensions diff --git a/Lib/distutils/fancy_getopt.py b/Lib/distutils/fancy_getopt.py deleted file mode 100644 index 7d170dd27731a5..00000000000000 --- a/Lib/distutils/fancy_getopt.py +++ /dev/null @@ -1,457 +0,0 @@ -"""distutils.fancy_getopt - -Wrapper around the standard getopt module that provides the following -additional features: - * short and long options are tied together - * options have help strings, so fancy_getopt could potentially - create a complete usage summary - * options set attributes of a passed-in object -""" - -import sys, string, re -import getopt -from distutils.errors import * - -# Much like command_re in distutils.core, this is close to but not quite -# the same as a Python NAME -- except, in the spirit of most GNU -# utilities, we use '-' in place of '_'. (The spirit of LISP lives on!) -# The similarities to NAME are again not a coincidence... -longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)' -longopt_re = re.compile(r'^%s$' % longopt_pat) - -# For recognizing "negative alias" options, eg. "quiet=!verbose" -neg_alias_re = re.compile("^(%s)=!(%s)$" % (longopt_pat, longopt_pat)) - -# This is used to translate long options to legitimate Python identifiers -# (for use as attributes of some object). -longopt_xlate = str.maketrans('-', '_') - -class FancyGetopt: - """Wrapper around the standard 'getopt()' module that provides some - handy extra functionality: - * short and long options are tied together - * options have help strings, and help text can be assembled - from them - * options set attributes of a passed-in object - * boolean options can have "negative aliases" -- eg. if - --quiet is the "negative alias" of --verbose, then "--quiet" - on the command line sets 'verbose' to false - """ - - def __init__(self, option_table=None): - # The option table is (currently) a list of tuples. The - # tuples may have 3 or four values: - # (long_option, short_option, help_string [, repeatable]) - # if an option takes an argument, its long_option should have '=' - # appended; short_option should just be a single character, no ':' - # in any case. If a long_option doesn't have a corresponding - # short_option, short_option should be None. 
All option tuples - # must have long options. - self.option_table = option_table - - # 'option_index' maps long option names to entries in the option - # table (ie. those 3-tuples). - self.option_index = {} - if self.option_table: - self._build_index() - - # 'alias' records (duh) alias options; {'foo': 'bar'} means - # --foo is an alias for --bar - self.alias = {} - - # 'negative_alias' keeps track of options that are the boolean - # opposite of some other option - self.negative_alias = {} - - # These keep track of the information in the option table. We - # don't actually populate these structures until we're ready to - # parse the command-line, since the 'option_table' passed in here - # isn't necessarily the final word. - self.short_opts = [] - self.long_opts = [] - self.short2long = {} - self.attr_name = {} - self.takes_arg = {} - - # And 'option_order' is filled up in 'getopt()'; it records the - # original order of options (and their values) on the command-line, - # but expands short options, converts aliases, etc. - self.option_order = [] - - def _build_index(self): - self.option_index.clear() - for option in self.option_table: - self.option_index[option[0]] = option - - def set_option_table(self, option_table): - self.option_table = option_table - self._build_index() - - def add_option(self, long_option, short_option=None, help_string=None): - if long_option in self.option_index: - raise DistutilsGetoptError( - "option conflict: already an option '%s'" % long_option) - else: - option = (long_option, short_option, help_string) - self.option_table.append(option) - self.option_index[long_option] = option - - def has_option(self, long_option): - """Return true if the option table for this parser has an - option with long name 'long_option'.""" - return long_option in self.option_index - - def get_attr_name(self, long_option): - """Translate long option name 'long_option' to the form it - has as an attribute of some object: ie., translate hyphens - to underscores.""" - return long_option.translate(longopt_xlate) - - def _check_alias_dict(self, aliases, what): - assert isinstance(aliases, dict) - for (alias, opt) in aliases.items(): - if alias not in self.option_index: - raise DistutilsGetoptError(("invalid %s '%s': " - "option '%s' not defined") % (what, alias, alias)) - if opt not in self.option_index: - raise DistutilsGetoptError(("invalid %s '%s': " - "aliased option '%s' not defined") % (what, alias, opt)) - - def set_aliases(self, alias): - """Set the aliases for this option parser.""" - self._check_alias_dict(alias, "alias") - self.alias = alias - - def set_negative_aliases(self, negative_alias): - """Set the negative aliases for this option parser. - 'negative_alias' should be a dictionary mapping option names to - option names, both the key and value must already be defined - in the option table.""" - self._check_alias_dict(negative_alias, "negative alias") - self.negative_alias = negative_alias - - def _grok_option_table(self): - """Populate the various data structures that keep tabs on the - option table. Called by 'getopt()' before it can do anything - worthwhile. 
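The constructor comments above define option-table entries as (long_option, short_option, help_string[, repeatable]) tuples, with a trailing '=' on long options that take an argument and a separate mapping for negative aliases. A small invented table in that shape, plus the hyphen-to-underscore attribute translation used throughout the module:

# Invented example of the table shape documented above.
option_table = [
    ("verbose", "v", "run verbosely (default)", 1),       # 4th item: repeatable
    ("quiet", "q", "run quietly (turns verbosity off)"),
    ("build-dir=", "b", "directory to build in"),          # trailing '=' takes a value
    ("dry-run", "n", "don't actually do anything"),
]

# "quiet" is declared as the boolean opposite of "verbose".
negative_alias = {"quiet": "verbose"}

# Long names become attribute names by swapping '-' for '_' (cf. translate_longopt below).
attr_names = [long.rstrip("=").replace("-", "_") for long, *_ in option_table]
print(attr_names)   # ['verbose', 'quiet', 'build_dir', 'dry_run']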
- """ - self.long_opts = [] - self.short_opts = [] - self.short2long.clear() - self.repeat = {} - - for option in self.option_table: - if len(option) == 3: - long, short, help = option - repeat = 0 - elif len(option) == 4: - long, short, help, repeat = option - else: - # the option table is part of the code, so simply - # assert that it is correct - raise ValueError("invalid option tuple: %r" % (option,)) - - # Type- and value-check the option names - if not isinstance(long, str) or len(long) < 2: - raise DistutilsGetoptError(("invalid long option '%s': " - "must be a string of length >= 2") % long) - - if (not ((short is None) or - (isinstance(short, str) and len(short) == 1))): - raise DistutilsGetoptError("invalid short option '%s': " - "must a single character or None" % short) - - self.repeat[long] = repeat - self.long_opts.append(long) - - if long[-1] == '=': # option takes an argument? - if short: short = short + ':' - long = long[0:-1] - self.takes_arg[long] = 1 - else: - # Is option is a "negative alias" for some other option (eg. - # "quiet" == "!verbose")? - alias_to = self.negative_alias.get(long) - if alias_to is not None: - if self.takes_arg[alias_to]: - raise DistutilsGetoptError( - "invalid negative alias '%s': " - "aliased option '%s' takes a value" - % (long, alias_to)) - - self.long_opts[-1] = long # XXX redundant?! - self.takes_arg[long] = 0 - - # If this is an alias option, make sure its "takes arg" flag is - # the same as the option it's aliased to. - alias_to = self.alias.get(long) - if alias_to is not None: - if self.takes_arg[long] != self.takes_arg[alias_to]: - raise DistutilsGetoptError( - "invalid alias '%s': inconsistent with " - "aliased option '%s' (one of them takes a value, " - "the other doesn't" - % (long, alias_to)) - - # Now enforce some bondage on the long option name, so we can - # later translate it to an attribute name on some object. Have - # to do this a bit late to make sure we've removed any trailing - # '='. - if not longopt_re.match(long): - raise DistutilsGetoptError( - "invalid long option name '%s' " - "(must be letters, numbers, hyphens only" % long) - - self.attr_name[long] = self.get_attr_name(long) - if short: - self.short_opts.append(short) - self.short2long[short[0]] = long - - def getopt(self, args=None, object=None): - """Parse command-line options in args. Store as attributes on object. - - If 'args' is None or not supplied, uses 'sys.argv[1:]'. If - 'object' is None or not supplied, creates a new OptionDummy - object, stores option values there, and returns a tuple (args, - object). If 'object' is supplied, it is modified in place and - 'getopt()' just returns 'args'; in both cases, the returned - 'args' is a modified copy of the passed-in 'args' list, which - is left untouched. - """ - if args is None: - args = sys.argv[1:] - if object is None: - object = OptionDummy() - created_object = True - else: - created_object = False - - self._grok_option_table() - - short_opts = ' '.join(self.short_opts) - try: - opts, args = getopt.getopt(args, short_opts, self.long_opts) - except getopt.error as msg: - raise DistutilsArgError(msg) - - for opt, val in opts: - if len(opt) == 2 and opt[0] == '-': # it's a short option - opt = self.short2long[opt[1]] - else: - assert len(opt) > 2 and opt[:2] == '--' - opt = opt[2:] - - alias = self.alias.get(opt) - if alias: - opt = alias - - if not self.takes_arg[opt]: # boolean option? 
- assert val == '', "boolean option can't have value" - alias = self.negative_alias.get(opt) - if alias: - opt = alias - val = 0 - else: - val = 1 - - attr = self.attr_name[opt] - # The only repeating option at the moment is 'verbose'. - # It has a negative option -q quiet, which should set verbose = 0. - if val and self.repeat.get(attr) is not None: - val = getattr(object, attr, 0) + 1 - setattr(object, attr, val) - self.option_order.append((opt, val)) - - # for opts - if created_object: - return args, object - else: - return args - - def get_option_order(self): - """Returns the list of (option, value) tuples processed by the - previous run of 'getopt()'. Raises RuntimeError if - 'getopt()' hasn't been called yet. - """ - if self.option_order is None: - raise RuntimeError("'getopt()' hasn't been called yet") - else: - return self.option_order - - def generate_help(self, header=None): - """Generate help text (a list of strings, one per suggested line of - output) from the option table for this FancyGetopt object. - """ - # Blithely assume the option table is good: probably wouldn't call - # 'generate_help()' unless you've already called 'getopt()'. - - # First pass: determine maximum length of long option names - max_opt = 0 - for option in self.option_table: - long = option[0] - short = option[1] - l = len(long) - if long[-1] == '=': - l = l - 1 - if short is not None: - l = l + 5 # " (-x)" where short == 'x' - if l > max_opt: - max_opt = l - - opt_width = max_opt + 2 + 2 + 2 # room for indent + dashes + gutter - - # Typical help block looks like this: - # --foo controls foonabulation - # Help block for longest option looks like this: - # --flimflam set the flim-flam level - # and with wrapped text: - # --flimflam set the flim-flam level (must be between - # 0 and 100, except on Tuesdays) - # Options with short names will have the short name shown (but - # it doesn't contribute to max_opt): - # --foo (-f) controls foonabulation - # If adding the short option would make the left column too wide, - # we push the explanation off to the next line - # --flimflam (-l) - # set the flim-flam level - # Important parameters: - # - 2 spaces before option block start lines - # - 2 dashes for each long option name - # - min. 2 spaces between option and explanation (gutter) - # - 5 characters (incl. space) for short option name - - # Now generate lines of help text. (If 80 columns were good enough - # for Jesus, then 78 columns are good enough for me!) 
- line_width = 78 - text_width = line_width - opt_width - big_indent = ' ' * opt_width - if header: - lines = [header] - else: - lines = ['Option summary:'] - - for option in self.option_table: - long, short, help = option[:3] - text = wrap_text(help, text_width) - if long[-1] == '=': - long = long[0:-1] - - # Case 1: no short option at all (makes life easy) - if short is None: - if text: - lines.append(" --%-*s %s" % (max_opt, long, text[0])) - else: - lines.append(" --%-*s " % (max_opt, long)) - - # Case 2: we have a short option, so we have to include it - # just after the long option - else: - opt_names = "%s (-%s)" % (long, short) - if text: - lines.append(" --%-*s %s" % - (max_opt, opt_names, text[0])) - else: - lines.append(" --%-*s" % opt_names) - - for l in text[1:]: - lines.append(big_indent + l) - return lines - - def print_help(self, header=None, file=None): - if file is None: - file = sys.stdout - for line in self.generate_help(header): - file.write(line + "\n") - - -def fancy_getopt(options, negative_opt, object, args): - parser = FancyGetopt(options) - parser.set_negative_aliases(negative_opt) - return parser.getopt(args, object) - - -WS_TRANS = {ord(_wschar) : ' ' for _wschar in string.whitespace} - -def wrap_text(text, width): - """wrap_text(text : string, width : int) -> [string] - - Split 'text' into multiple lines of no more than 'width' characters - each, and return the list of strings that results. - """ - if text is None: - return [] - if len(text) <= width: - return [text] - - text = text.expandtabs() - text = text.translate(WS_TRANS) - chunks = re.split(r'( +|-+)', text) - chunks = [ch for ch in chunks if ch] # ' - ' results in empty strings - lines = [] - - while chunks: - cur_line = [] # list of chunks (to-be-joined) - cur_len = 0 # length of current line - - while chunks: - l = len(chunks[0]) - if cur_len + l <= width: # can squeeze (at least) this chunk in - cur_line.append(chunks[0]) - del chunks[0] - cur_len = cur_len + l - else: # this line is full - # drop last chunk if all space - if cur_line and cur_line[-1][0] == ' ': - del cur_line[-1] - break - - if chunks: # any chunks left to process? - # if the current line is still empty, then we had a single - # chunk that's too big too fit on a line -- so we break - # down and break it up at the line width - if cur_len == 0: - cur_line.append(chunks[0][0:width]) - chunks[0] = chunks[0][width:] - - # all-whitespace chunks at the end of a line can be discarded - # (and we know from the re.split above that if a chunk has - # *any* whitespace, it is *all* whitespace) - if chunks[0][0] == ' ': - del chunks[0] - - # and store this line in the list-of-all-lines -- as a single - # string, of course! - lines.append(''.join(cur_line)) - - return lines - - -def translate_longopt(opt): - """Convert a long option name to a valid Python identifier by - changing "-" to "_". - """ - return opt.translate(longopt_xlate) - - -class OptionDummy: - """Dummy class just used as a place to hold command-line option - values as instance attributes.""" - - def __init__(self, options=[]): - """Create a new OptionDummy instance. The attributes listed in - 'options' will be initialized to None.""" - for opt in options: - setattr(self, opt, None) - - -if __name__ == "__main__": - text = """\ -Tra-la-la, supercalifragilisticexpialidocious. -How *do* you spell that odd word, anyways? 
-(Someone ask Mary -- she'll know [or she'll -say, "How should I know?"].)""" - - for w in (10, 20, 30, 40): - print("width: %d" % w) - print("\n".join(wrap_text(text, w))) - print() diff --git a/Lib/distutils/file_util.py b/Lib/distutils/file_util.py deleted file mode 100644 index b3fee35a6cce81..00000000000000 --- a/Lib/distutils/file_util.py +++ /dev/null @@ -1,238 +0,0 @@ -"""distutils.file_util - -Utility functions for operating on single files. -""" - -import os -from distutils.errors import DistutilsFileError -from distutils import log - -# for generating verbose output in 'copy_file()' -_copy_action = { None: 'copying', - 'hard': 'hard linking', - 'sym': 'symbolically linking' } - - -def _copy_file_contents(src, dst, buffer_size=16*1024): - """Copy the file 'src' to 'dst'; both must be filenames. Any error - opening either file, reading from 'src', or writing to 'dst', raises - DistutilsFileError. Data is read/written in chunks of 'buffer_size' - bytes (default 16k). No attempt is made to handle anything apart from - regular files. - """ - # Stolen from shutil module in the standard library, but with - # custom error-handling added. - fsrc = None - fdst = None - try: - try: - fsrc = open(src, 'rb') - except OSError as e: - raise DistutilsFileError("could not open '%s': %s" % (src, e.strerror)) - - if os.path.exists(dst): - try: - os.unlink(dst) - except OSError as e: - raise DistutilsFileError( - "could not delete '%s': %s" % (dst, e.strerror)) - - try: - fdst = open(dst, 'wb') - except OSError as e: - raise DistutilsFileError( - "could not create '%s': %s" % (dst, e.strerror)) - - while True: - try: - buf = fsrc.read(buffer_size) - except OSError as e: - raise DistutilsFileError( - "could not read from '%s': %s" % (src, e.strerror)) - - if not buf: - break - - try: - fdst.write(buf) - except OSError as e: - raise DistutilsFileError( - "could not write to '%s': %s" % (dst, e.strerror)) - finally: - if fdst: - fdst.close() - if fsrc: - fsrc.close() - -def copy_file(src, dst, preserve_mode=1, preserve_times=1, update=0, - link=None, verbose=1, dry_run=0): - """Copy a file 'src' to 'dst'. If 'dst' is a directory, then 'src' is - copied there with the same name; otherwise, it must be a filename. (If - the file exists, it will be ruthlessly clobbered.) If 'preserve_mode' - is true (the default), the file's mode (type and permission bits, or - whatever is analogous on the current platform) is copied. If - 'preserve_times' is true (the default), the last-modified and - last-access times are copied as well. If 'update' is true, 'src' will - only be copied if 'dst' does not exist, or if 'dst' does exist but is - older than 'src'. - - 'link' allows you to make hard links (os.link) or symbolic links - (os.symlink) instead of copying: set it to "hard" or "sym"; if it is - None (the default), files are copied. Don't set 'link' on systems that - don't support it: 'copy_file()' doesn't check if hard or symbolic - linking is available. If hardlink fails, falls back to - _copy_file_contents(). - - Under Mac OS, uses the native file copy function in macostools; on - other systems, uses '_copy_file_contents()' to copy file contents. - - Return a tuple (dest_name, copied): 'dest_name' is the actual name of - the output file, and 'copied' is true if the file was copied (or would - have been copied, if 'dry_run' true). - """ - # XXX if the destination file already exists, we clobber it if - # copying, but blow up if linking. Hmmm. And I don't know what - # macostools.copyfile() does. 
Should definitely be consistent, and - # should probably blow up if destination exists and we would be - # changing it (ie. it's not already a hard/soft link to src OR - # (not update) and (src newer than dst). - - from distutils.dep_util import newer - from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE - - if not os.path.isfile(src): - raise DistutilsFileError( - "can't copy '%s': doesn't exist or not a regular file" % src) - - if os.path.isdir(dst): - dir = dst - dst = os.path.join(dst, os.path.basename(src)) - else: - dir = os.path.dirname(dst) - - if update and not newer(src, dst): - if verbose >= 1: - log.debug("not copying %s (output up-to-date)", src) - return (dst, 0) - - try: - action = _copy_action[link] - except KeyError: - raise ValueError("invalid value '%s' for 'link' argument" % link) - - if verbose >= 1: - if os.path.basename(dst) == os.path.basename(src): - log.info("%s %s -> %s", action, src, dir) - else: - log.info("%s %s -> %s", action, src, dst) - - if dry_run: - return (dst, 1) - - # If linking (hard or symbolic), use the appropriate system call - # (Unix only, of course, but that's the caller's responsibility) - elif link == 'hard': - if not (os.path.exists(dst) and os.path.samefile(src, dst)): - try: - os.link(src, dst) - return (dst, 1) - except OSError: - # If hard linking fails, fall back on copying file - # (some special filesystems don't support hard linking - # even under Unix, see issue #8876). - pass - elif link == 'sym': - if not (os.path.exists(dst) and os.path.samefile(src, dst)): - os.symlink(src, dst) - return (dst, 1) - - # Otherwise (non-Mac, not linking), copy the file contents and - # (optionally) copy the times and mode. - _copy_file_contents(src, dst) - if preserve_mode or preserve_times: - st = os.stat(src) - - # According to David Ascher <da@ski.org>, utime() should be done - # before chmod() (at least under NT). - if preserve_times: - os.utime(dst, (st[ST_ATIME], st[ST_MTIME])) - if preserve_mode: - os.chmod(dst, S_IMODE(st[ST_MODE])) - - return (dst, 1) - - -# XXX I suspect this is Unix-specific -- need porting help! -def move_file (src, dst, - verbose=1, - dry_run=0): - - """Move a file 'src' to 'dst'. If 'dst' is a directory, the file will - be moved into it with the same name; otherwise, 'src' is just renamed - to 'dst'. Return the new full name of the file. - - Handles cross-device moves on Unix using 'copy_file()'. What about - other systems??? 
- """ - from os.path import exists, isfile, isdir, basename, dirname - import errno - - if verbose >= 1: - log.info("moving %s -> %s", src, dst) - - if dry_run: - return dst - - if not isfile(src): - raise DistutilsFileError("can't move '%s': not a regular file" % src) - - if isdir(dst): - dst = os.path.join(dst, basename(src)) - elif exists(dst): - raise DistutilsFileError( - "can't move '%s': destination '%s' already exists" % - (src, dst)) - - if not isdir(dirname(dst)): - raise DistutilsFileError( - "can't move '%s': destination '%s' not a valid path" % - (src, dst)) - - copy_it = False - try: - os.rename(src, dst) - except OSError as e: - (num, msg) = e.args - if num == errno.EXDEV: - copy_it = True - else: - raise DistutilsFileError( - "couldn't move '%s' to '%s': %s" % (src, dst, msg)) - - if copy_it: - copy_file(src, dst, verbose=verbose) - try: - os.unlink(src) - except OSError as e: - (num, msg) = e.args - try: - os.unlink(dst) - except OSError: - pass - raise DistutilsFileError( - "couldn't move '%s' to '%s' by copy/delete: " - "delete '%s' failed: %s" - % (src, dst, src, msg)) - return dst - - -def write_file (filename, contents): - """Create a file with the specified name and write 'contents' (a - sequence of strings without line terminators) to it. - """ - f = open(filename, "w") - try: - for line in contents: - f.write(line + "\n") - finally: - f.close() diff --git a/Lib/distutils/filelist.py b/Lib/distutils/filelist.py deleted file mode 100644 index c92d5fdba393bb..00000000000000 --- a/Lib/distutils/filelist.py +++ /dev/null @@ -1,327 +0,0 @@ -"""distutils.filelist - -Provides the FileList class, used for poking about the filesystem -and building lists of files. -""" - -import os, re -import fnmatch -import functools -from distutils.util import convert_path -from distutils.errors import DistutilsTemplateError, DistutilsInternalError -from distutils import log - -class FileList: - """A list of files built by on exploring the filesystem and filtered by - applying various patterns to what we find there. - - Instance attributes: - dir - directory from which files will be taken -- only used if - 'allfiles' not supplied to constructor - files - list of filenames currently being built/filtered/manipulated - allfiles - complete list of files under consideration (ie. without any - filtering applied) - """ - - def __init__(self, warn=None, debug_print=None): - # ignore argument to FileList, but keep them for backwards - # compatibility - self.allfiles = None - self.files = [] - - def set_allfiles(self, allfiles): - self.allfiles = allfiles - - def findall(self, dir=os.curdir): - self.allfiles = findall(dir) - - def debug_print(self, msg): - """Print 'msg' to stdout if the global DEBUG (taken from the - DISTUTILS_DEBUG environment variable) flag is true. - """ - from distutils.debug import DEBUG - if DEBUG: - print(msg) - - # -- List-like methods --------------------------------------------- - - def append(self, item): - self.files.append(item) - - def extend(self, items): - self.files.extend(items) - - def sort(self): - # Not a strict lexical sort! - sortable_files = sorted(map(os.path.split, self.files)) - self.files = [] - for sort_tuple in sortable_files: - self.files.append(os.path.join(*sort_tuple)) - - - # -- Other miscellaneous utility methods --------------------------- - - def remove_duplicates(self): - # Assumes list has been sorted! 
- for i in range(len(self.files) - 1, 0, -1): - if self.files[i] == self.files[i - 1]: - del self.files[i] - - - # -- "File template" methods --------------------------------------- - - def _parse_template_line(self, line): - words = line.split() - action = words[0] - - patterns = dir = dir_pattern = None - - if action in ('include', 'exclude', - 'global-include', 'global-exclude'): - if len(words) < 2: - raise DistutilsTemplateError( - "'%s' expects <pattern1> <pattern2> ..." % action) - patterns = [convert_path(w) for w in words[1:]] - elif action in ('recursive-include', 'recursive-exclude'): - if len(words) < 3: - raise DistutilsTemplateError( - "'%s' expects <dir> <pattern1> <pattern2> ..." % action) - dir = convert_path(words[1]) - patterns = [convert_path(w) for w in words[2:]] - elif action in ('graft', 'prune'): - if len(words) != 2: - raise DistutilsTemplateError( - "'%s' expects a single <dir_pattern>" % action) - dir_pattern = convert_path(words[1]) - else: - raise DistutilsTemplateError("unknown action '%s'" % action) - - return (action, patterns, dir, dir_pattern) - - def process_template_line(self, line): - # Parse the line: split it up, make sure the right number of words - # is there, and return the relevant words. 'action' is always - # defined: it's the first word of the line. Which of the other - # three are defined depends on the action; it'll be either - # patterns, (dir and patterns), or (dir_pattern). - (action, patterns, dir, dir_pattern) = self._parse_template_line(line) - - # OK, now we know that the action is valid and we have the - # right number of words on the line for that action -- so we - # can proceed with minimal error-checking. - if action == 'include': - self.debug_print("include " + ' '.join(patterns)) - for pattern in patterns: - if not self.include_pattern(pattern, anchor=1): - log.warn("warning: no files found matching '%s'", - pattern) - - elif action == 'exclude': - self.debug_print("exclude " + ' '.join(patterns)) - for pattern in patterns: - if not self.exclude_pattern(pattern, anchor=1): - log.warn(("warning: no previously-included files " - "found matching '%s'"), pattern) - - elif action == 'global-include': - self.debug_print("global-include " + ' '.join(patterns)) - for pattern in patterns: - if not self.include_pattern(pattern, anchor=0): - log.warn(("warning: no files found matching '%s' " - "anywhere in distribution"), pattern) - - elif action == 'global-exclude': - self.debug_print("global-exclude " + ' '.join(patterns)) - for pattern in patterns: - if not self.exclude_pattern(pattern, anchor=0): - log.warn(("warning: no previously-included files matching " - "'%s' found anywhere in distribution"), - pattern) - - elif action == 'recursive-include': - self.debug_print("recursive-include %s %s" % - (dir, ' '.join(patterns))) - for pattern in patterns: - if not self.include_pattern(pattern, prefix=dir): - log.warn(("warning: no files found matching '%s' " - "under directory '%s'"), - pattern, dir) - - elif action == 'recursive-exclude': - self.debug_print("recursive-exclude %s %s" % - (dir, ' '.join(patterns))) - for pattern in patterns: - if not self.exclude_pattern(pattern, prefix=dir): - log.warn(("warning: no previously-included files matching " - "'%s' found under directory '%s'"), - pattern, dir) - - elif action == 'graft': - self.debug_print("graft " + dir_pattern) - if not self.include_pattern(None, prefix=dir_pattern): - log.warn("warning: no directories found matching '%s'", - dir_pattern) - - elif action == 'prune': - 
self.debug_print("prune " + dir_pattern) - if not self.exclude_pattern(None, prefix=dir_pattern): - log.warn(("no previously-included directories found " - "matching '%s'"), dir_pattern) - else: - raise DistutilsInternalError( - "this cannot happen: invalid action '%s'" % action) - - - # -- Filtering/selection methods ----------------------------------- - - def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0): - """Select strings (presumably filenames) from 'self.files' that - match 'pattern', a Unix-style wildcard (glob) pattern. Patterns - are not quite the same as implemented by the 'fnmatch' module: '*' - and '?' match non-special characters, where "special" is platform- - dependent: slash on Unix; colon, slash, and backslash on - DOS/Windows; and colon on Mac OS. - - If 'anchor' is true (the default), then the pattern match is more - stringent: "*.py" will match "foo.py" but not "foo/bar.py". If - 'anchor' is false, both of these will match. - - If 'prefix' is supplied, then only filenames starting with 'prefix' - (itself a pattern) and ending with 'pattern', with anything in between - them, will match. 'anchor' is ignored in this case. - - If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and - 'pattern' is assumed to be either a string containing a regex or a - regex object -- no translation is done, the regex is just compiled - and used as-is. - - Selected strings will be added to self.files. - - Return True if files are found, False otherwise. - """ - # XXX docstring lying about what the special chars are? - files_found = False - pattern_re = translate_pattern(pattern, anchor, prefix, is_regex) - self.debug_print("include_pattern: applying regex r'%s'" % - pattern_re.pattern) - - # delayed loading of allfiles list - if self.allfiles is None: - self.findall() - - for name in self.allfiles: - if pattern_re.search(name): - self.debug_print(" adding " + name) - self.files.append(name) - files_found = True - return files_found - - - def exclude_pattern (self, pattern, - anchor=1, prefix=None, is_regex=0): - """Remove strings (presumably filenames) from 'files' that match - 'pattern'. Other parameters are the same as for - 'include_pattern()', above. - The list 'self.files' is modified in place. - Return True if files are found, False otherwise. - """ - files_found = False - pattern_re = translate_pattern(pattern, anchor, prefix, is_regex) - self.debug_print("exclude_pattern: applying regex r'%s'" % - pattern_re.pattern) - for i in range(len(self.files)-1, -1, -1): - if pattern_re.search(self.files[i]): - self.debug_print(" removing " + self.files[i]) - del self.files[i] - files_found = True - return files_found - - -# ---------------------------------------------------------------------- -# Utility functions - -def _find_all_simple(path): - """ - Find all files under 'path' - """ - results = ( - os.path.join(base, file) - for base, dirs, files in os.walk(path, followlinks=True) - for file in files - ) - return filter(os.path.isfile, results) - - -def findall(dir=os.curdir): - """ - Find all files under 'dir' and return the list of full filenames. - Unless dir is '.', return full filenames with dir prepended. - """ - files = _find_all_simple(dir) - if dir == os.curdir: - make_rel = functools.partial(os.path.relpath, start=dir) - files = map(make_rel, files) - return list(files) - - -def glob_to_re(pattern): - """Translate a shell-like glob pattern to a regular expression; return - a string containing the regex. 
Differs from 'fnmatch.translate()' in - that '*' does not match "special characters" (which are - platform-specific). - """ - pattern_re = fnmatch.translate(pattern) - - # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which - # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix, - # and by extension they shouldn't match such "special characters" under - # any OS. So change all non-escaped dots in the RE to match any - # character except the special characters (currently: just os.sep). - sep = os.sep - if os.sep == '\\': - # we're using a regex to manipulate a regex, so we need - # to escape the backslash twice - sep = r'\\\\' - escaped = r'\1[^%s]' % sep - pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re) - return pattern_re - - -def translate_pattern(pattern, anchor=1, prefix=None, is_regex=0): - """Translate a shell-like wildcard pattern to a compiled regular - expression. Return the compiled regex. If 'is_regex' true, - then 'pattern' is directly compiled to a regex (if it's a string) - or just returned as-is (assumes it's a regex object). - """ - if is_regex: - if isinstance(pattern, str): - return re.compile(pattern) - else: - return pattern - - # ditch start and end characters - start, _, end = glob_to_re('_').partition('_') - - if pattern: - pattern_re = glob_to_re(pattern) - assert pattern_re.startswith(start) and pattern_re.endswith(end) - else: - pattern_re = '' - - if prefix is not None: - prefix_re = glob_to_re(prefix) - assert prefix_re.startswith(start) and prefix_re.endswith(end) - prefix_re = prefix_re[len(start): len(prefix_re) - len(end)] - sep = os.sep - if os.sep == '\\': - sep = r'\\' - pattern_re = pattern_re[len(start): len(pattern_re) - len(end)] - pattern_re = r'%s\A%s%s.*%s%s' % (start, prefix_re, sep, pattern_re, end) - else: # no prefix -- respect anchor flag - if anchor: - pattern_re = r'%s\A%s' % (start, pattern_re[len(start):]) - - return re.compile(pattern_re) diff --git a/Lib/distutils/log.py b/Lib/distutils/log.py deleted file mode 100644 index 8ef6b28ea2ec09..00000000000000 --- a/Lib/distutils/log.py +++ /dev/null @@ -1,77 +0,0 @@ -"""A simple log mechanism styled after PEP 282.""" - -# The class here is styled after PEP 282 so that it could later be -# replaced with a standard Python logging implementation. 
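A minimal, hedged sketch of how this module's API was typically driven (it assumes an older Python that still ships distutils; the names come from the definitions below, and the file paths are placeholders):

    from distutils import log

    log.set_verbosity(2)   # a verbosity of 2 or more lowers the threshold to DEBUG
    log.info("copying %s -> %s", "README.txt", "build/README.txt")   # DEBUG/INFO print to stdout
    log.warn("no files found matching '%s'", "*.rst")                # WARN and above print to stderr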
- -DEBUG = 1 -INFO = 2 -WARN = 3 -ERROR = 4 -FATAL = 5 - -import sys - -class Log: - - def __init__(self, threshold=WARN): - self.threshold = threshold - - def _log(self, level, msg, args): - if level not in (DEBUG, INFO, WARN, ERROR, FATAL): - raise ValueError('%s wrong log level' % str(level)) - - if level >= self.threshold: - if args: - msg = msg % args - if level in (WARN, ERROR, FATAL): - stream = sys.stderr - else: - stream = sys.stdout - try: - stream.write('%s\n' % msg) - except UnicodeEncodeError: - # emulate backslashreplace error handler - encoding = stream.encoding - msg = msg.encode(encoding, "backslashreplace").decode(encoding) - stream.write('%s\n' % msg) - stream.flush() - - def log(self, level, msg, *args): - self._log(level, msg, args) - - def debug(self, msg, *args): - self._log(DEBUG, msg, args) - - def info(self, msg, *args): - self._log(INFO, msg, args) - - def warn(self, msg, *args): - self._log(WARN, msg, args) - - def error(self, msg, *args): - self._log(ERROR, msg, args) - - def fatal(self, msg, *args): - self._log(FATAL, msg, args) - -_global_log = Log() -log = _global_log.log -debug = _global_log.debug -info = _global_log.info -warn = _global_log.warn -error = _global_log.error -fatal = _global_log.fatal - -def set_threshold(level): - # return the old threshold for use from tests - old = _global_log.threshold - _global_log.threshold = level - return old - -def set_verbosity(v): - if v <= 0: - set_threshold(WARN) - elif v == 1: - set_threshold(INFO) - elif v >= 2: - set_threshold(DEBUG) diff --git a/Lib/distutils/msvc9compiler.py b/Lib/distutils/msvc9compiler.py deleted file mode 100644 index a7976fbe3ed924..00000000000000 --- a/Lib/distutils/msvc9compiler.py +++ /dev/null @@ -1,788 +0,0 @@ -"""distutils.msvc9compiler - -Contains MSVCCompiler, an implementation of the abstract CCompiler class -for the Microsoft Visual Studio 2008. - -The module is compatible with VS 2005 and VS 2008. You can find legacy support -for older versions of VS in distutils.msvccompiler. -""" - -# Written by Perry Stoll -# hacked by Robin Becker and Thomas Heller to do a better job of -# finding DevStudio (through the registry) -# ported to VS2005 and VS 2008 by Christian Heimes - -import os -import subprocess -import sys -import re - -from distutils.errors import DistutilsExecError, DistutilsPlatformError, \ - CompileError, LibError, LinkError -from distutils.ccompiler import CCompiler, gen_lib_options -from distutils import log -from distutils.util import get_platform - -import winreg - -RegOpenKeyEx = winreg.OpenKeyEx -RegEnumKey = winreg.EnumKey -RegEnumValue = winreg.EnumValue -RegError = winreg.error - -HKEYS = (winreg.HKEY_USERS, - winreg.HKEY_CURRENT_USER, - winreg.HKEY_LOCAL_MACHINE, - winreg.HKEY_CLASSES_ROOT) - -NATIVE_WIN64 = (sys.platform == 'win32' and sys.maxsize > 2**32) -if NATIVE_WIN64: - # Visual C++ is a 32-bit application, so we need to look in - # the corresponding registry branch, if we're running a - # 64-bit Python on Win64 - VS_BASE = r"Software\Wow6432Node\Microsoft\VisualStudio\%0.1f" - WINSDK_BASE = r"Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows" - NET_BASE = r"Software\Wow6432Node\Microsoft\.NETFramework" -else: - VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f" - WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows" - NET_BASE = r"Software\Microsoft\.NETFramework" - -# A map keyed by get_platform() return values to values accepted by -# 'vcvarsall.bat'. 
Note a cross-compile may combine these (eg, 'x86_amd64' is -# the param to cross-compile on x86 targeting amd64.) -PLAT_TO_VCVARS = { - 'win32' : 'x86', - 'win-amd64' : 'amd64', -} - -class Reg: - """Helper class to read values from the registry - """ - - def get_value(cls, path, key): - for base in HKEYS: - d = cls.read_values(base, path) - if d and key in d: - return d[key] - raise KeyError(key) - get_value = classmethod(get_value) - - def read_keys(cls, base, key): - """Return list of registry keys.""" - try: - handle = RegOpenKeyEx(base, key) - except RegError: - return None - L = [] - i = 0 - while True: - try: - k = RegEnumKey(handle, i) - except RegError: - break - L.append(k) - i += 1 - return L - read_keys = classmethod(read_keys) - - def read_values(cls, base, key): - """Return dict of registry keys and values. - - All names are converted to lowercase. - """ - try: - handle = RegOpenKeyEx(base, key) - except RegError: - return None - d = {} - i = 0 - while True: - try: - name, value, type = RegEnumValue(handle, i) - except RegError: - break - name = name.lower() - d[cls.convert_mbcs(name)] = cls.convert_mbcs(value) - i += 1 - return d - read_values = classmethod(read_values) - - def convert_mbcs(s): - dec = getattr(s, "decode", None) - if dec is not None: - try: - s = dec("mbcs") - except UnicodeError: - pass - return s - convert_mbcs = staticmethod(convert_mbcs) - -class MacroExpander: - - def __init__(self, version): - self.macros = {} - self.vsbase = VS_BASE % version - self.load_macros(version) - - def set_macro(self, macro, path, key): - self.macros["$(%s)" % macro] = Reg.get_value(path, key) - - def load_macros(self, version): - self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir") - self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir") - self.set_macro("FrameworkDir", NET_BASE, "installroot") - try: - if version >= 8.0: - self.set_macro("FrameworkSDKDir", NET_BASE, - "sdkinstallrootv2.0") - else: - raise KeyError("sdkinstallrootv2.0") - except KeyError: - raise DistutilsPlatformError( - """Python was built with Visual Studio 2008; -extensions must be built with a compiler than can generate compatible binaries. -Visual Studio 2008 was not found on this system. If you have Cygwin installed, -you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""") - - if version >= 9.0: - self.set_macro("FrameworkVersion", self.vsbase, "clr version") - self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder") - else: - p = r"Software\Microsoft\NET Framework Setup\Product" - for base in HKEYS: - try: - h = RegOpenKeyEx(base, p) - except RegError: - continue - key = RegEnumKey(h, 0) - d = Reg.get_value(base, r"%s\%s" % (p, key)) - self.macros["$(FrameworkVersion)"] = d["version"] - - def sub(self, s): - for k, v in self.macros.items(): - s = s.replace(k, v) - return s - -def get_build_version(): - """Return the version of MSVC that was used to build Python. - - For Python 2.3 and up, the version number is included in - sys.version. For earlier versions, assume the compiler is MSVC 6. - """ - prefix = "MSC v." 
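    # Illustrative annotation (editor's sketch, not part of the removed file):
    # on a CPython built with Visual Studio 2008, sys.version contains a
    # fragment such as "... [MSC v.1500 32 bit (Intel)]", so the parsing below
    # yields s == "1500", majorVersion == 15 - 6 == 9, minorVersion == 0.0,
    # and the function returns 9.0, comfortably above the 8.0 floor this
    # module enforces further down.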
- i = sys.version.find(prefix) - if i == -1: - return 6 - i = i + len(prefix) - s, rest = sys.version[i:].split(" ", 1) - majorVersion = int(s[:-2]) - 6 - if majorVersion >= 13: - # v13 was skipped and should be v14 - majorVersion += 1 - minorVersion = int(s[2:3]) / 10.0 - # I don't think paths are affected by minor version in version 6 - if majorVersion == 6: - minorVersion = 0 - if majorVersion >= 6: - return majorVersion + minorVersion - # else we don't know what version of the compiler this is - return None - -def normalize_and_reduce_paths(paths): - """Return a list of normalized paths with duplicates removed. - - The current order of paths is maintained. - """ - # Paths are normalized so things like: /a and /a/ aren't both preserved. - reduced_paths = [] - for p in paths: - np = os.path.normpath(p) - # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set. - if np not in reduced_paths: - reduced_paths.append(np) - return reduced_paths - -def removeDuplicates(variable): - """Remove duplicate values of an environment variable. - """ - oldList = variable.split(os.pathsep) - newList = [] - for i in oldList: - if i not in newList: - newList.append(i) - newVariable = os.pathsep.join(newList) - return newVariable - -def find_vcvarsall(version): - """Find the vcvarsall.bat file - - At first it tries to find the productdir of VS 2008 in the registry. If - that fails it falls back to the VS90COMNTOOLS env var. - """ - vsbase = VS_BASE % version - try: - productdir = Reg.get_value(r"%s\Setup\VC" % vsbase, - "productdir") - except KeyError: - log.debug("Unable to find productdir in registry") - productdir = None - - if not productdir or not os.path.isdir(productdir): - toolskey = "VS%0.f0COMNTOOLS" % version - toolsdir = os.environ.get(toolskey, None) - - if toolsdir and os.path.isdir(toolsdir): - productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC") - productdir = os.path.abspath(productdir) - if not os.path.isdir(productdir): - log.debug("%s is not a valid directory" % productdir) - return None - else: - log.debug("Env var %s is not set or invalid" % toolskey) - if not productdir: - log.debug("No productdir found") - return None - vcvarsall = os.path.join(productdir, "vcvarsall.bat") - if os.path.isfile(vcvarsall): - return vcvarsall - log.debug("Unable to find vcvarsall.bat") - return None - -def query_vcvarsall(version, arch="x86"): - """Launch vcvarsall.bat and read the settings from its environment - """ - vcvarsall = find_vcvarsall(version) - interesting = {"include", "lib", "libpath", "path"} - result = {} - - if vcvarsall is None: - raise DistutilsPlatformError("Unable to find vcvarsall.bat") - log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version) - popen = subprocess.Popen('"%s" %s & set' % (vcvarsall, arch), - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - try: - stdout, stderr = popen.communicate() - if popen.wait() != 0: - raise DistutilsPlatformError(stderr.decode("mbcs")) - - stdout = stdout.decode("mbcs") - for line in stdout.split("\n"): - line = Reg.convert_mbcs(line) - if '=' not in line: - continue - line = line.strip() - key, value = line.split('=', 1) - key = key.lower() - if key in interesting: - if value.endswith(os.pathsep): - value = value[:-1] - result[key] = removeDuplicates(value) - - finally: - popen.stdout.close() - popen.stderr.close() - - if len(result) != len(interesting): - raise ValueError(str(list(result.keys()))) - - return result - -# More globals -VERSION = get_build_version() -if VERSION < 8.0: - raise 
DistutilsPlatformError("VC %0.1f is not supported by this module" % VERSION) -# MACROS = MacroExpander(VERSION) - -class MSVCCompiler(CCompiler) : - """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class.""" - - compiler_type = 'msvc' - - # Just set this so CCompiler's constructor doesn't barf. We currently - # don't use the 'set_executables()' bureaucracy provided by CCompiler, - # as it really isn't necessary for this sort of single-compiler class. - # Would be nice to have a consistent interface with UnixCCompiler, - # though, so it's worth thinking about. - executables = {} - - # Private class data (need to distinguish C from C++ source for compiler) - _c_extensions = ['.c'] - _cpp_extensions = ['.cc', '.cpp', '.cxx'] - _rc_extensions = ['.rc'] - _mc_extensions = ['.mc'] - - # Needed for the filename generation methods provided by the - # base class, CCompiler. - src_extensions = (_c_extensions + _cpp_extensions + - _rc_extensions + _mc_extensions) - res_extension = '.res' - obj_extension = '.obj' - static_lib_extension = '.lib' - shared_lib_extension = '.dll' - static_lib_format = shared_lib_format = '%s%s' - exe_extension = '.exe' - - def __init__(self, verbose=0, dry_run=0, force=0): - CCompiler.__init__ (self, verbose, dry_run, force) - self.__version = VERSION - self.__root = r"Software\Microsoft\VisualStudio" - # self.__macros = MACROS - self.__paths = [] - # target platform (.plat_name is consistent with 'bdist') - self.plat_name = None - self.__arch = None # deprecated name - self.initialized = False - - def initialize(self, plat_name=None): - # multi-init means we would need to check platform same each time... - assert not self.initialized, "don't init multiple times" - if plat_name is None: - plat_name = get_platform() - # sanity check for platforms to prevent obscure errors later. - ok_plats = 'win32', 'win-amd64' - if plat_name not in ok_plats: - raise DistutilsPlatformError("--plat-name must be one of %s" % - (ok_plats,)) - - if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"): - # Assume that the SDK set up everything alright; don't try to be - # smarter - self.cc = "cl.exe" - self.linker = "link.exe" - self.lib = "lib.exe" - self.rc = "rc.exe" - self.mc = "mc.exe" - else: - # On x86, 'vcvars32.bat amd64' creates an env that doesn't work; - # to cross compile, you use 'x86_amd64'. - # On AMD64, 'vcvars32.bat amd64' is a native build env; to cross - # compile use 'x86' (ie, it runs the x86 compiler directly) - if plat_name == get_platform() or plat_name == 'win32': - # native build or cross-compile to win32 - plat_spec = PLAT_TO_VCVARS[plat_name] - else: - # cross compile from win32 -> some 64bit - plat_spec = PLAT_TO_VCVARS[get_platform()] + '_' + \ - PLAT_TO_VCVARS[plat_name] - - vc_env = query_vcvarsall(VERSION, plat_spec) - - self.__paths = vc_env['path'].split(os.pathsep) - os.environ['lib'] = vc_env['lib'] - os.environ['include'] = vc_env['include'] - - if len(self.__paths) == 0: - raise DistutilsPlatformError("Python was built with %s, " - "and extensions need to be built with the same " - "version of the compiler, but it isn't installed." 
- % self.__product) - - self.cc = self.find_exe("cl.exe") - self.linker = self.find_exe("link.exe") - self.lib = self.find_exe("lib.exe") - self.rc = self.find_exe("rc.exe") # resource compiler - self.mc = self.find_exe("mc.exe") # message compiler - #self.set_path_env_var('lib') - #self.set_path_env_var('include') - - # extend the MSVC path with the current path - try: - for p in os.environ['path'].split(';'): - self.__paths.append(p) - except KeyError: - pass - self.__paths = normalize_and_reduce_paths(self.__paths) - os.environ['path'] = ";".join(self.__paths) - - self.preprocess_options = None - if self.__arch == "x86": - self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', - '/Z7', '/D_DEBUG'] - else: - # Win64 - self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GS-' , - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-', - '/Z7', '/D_DEBUG'] - - self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] - if self.__version >= 7: - self.ldflags_shared_debug = [ - '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG' - ] - self.ldflags_static = [ '/nologo'] - - self.initialized = True - - # -- Worker methods ------------------------------------------------ - - def object_filenames(self, - source_filenames, - strip_dir=0, - output_dir=''): - # Copied from ccompiler.py, extended to return .res as 'object'-file - # for .rc input file - if output_dir is None: output_dir = '' - obj_names = [] - for src_name in source_filenames: - (base, ext) = os.path.splitext (src_name) - base = os.path.splitdrive(base)[1] # Chop off the drive - base = base[os.path.isabs(base):] # If abs, chop off leading / - if ext not in self.src_extensions: - # Better to raise an exception instead of silently continuing - # and later complain about sources and targets having - # different lengths - raise CompileError ("Don't know how to compile %s" % src_name) - if strip_dir: - base = os.path.basename (base) - if ext in self._rc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) - elif ext in self._mc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) - else: - obj_names.append (os.path.join (output_dir, - base + self.obj_extension)) - return obj_names - - - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, - extra_preargs=None, extra_postargs=None, depends=None): - - if not self.initialized: - self.initialize() - compile_info = self._setup_compile(output_dir, macros, include_dirs, - sources, depends, extra_postargs) - macros, objects, extra_postargs, pp_opts, build = compile_info - - compile_opts = extra_preargs or [] - compile_opts.append ('/c') - if debug: - compile_opts.extend(self.compile_options_debug) - else: - compile_opts.extend(self.compile_options) - - for obj in objects: - try: - src, ext = build[obj] - except KeyError: - continue - if debug: - # pass the full pathname to MSVC in debug mode, - # this allows the debugger to find the source file - # without asking the user to browse for it - src = os.path.abspath(src) - - if ext in self._c_extensions: - input_opt = "/Tc" + src - elif ext in self._cpp_extensions: - input_opt = "/Tp" + src - elif ext in self._rc_extensions: - # compile .RC to .RES file - input_opt = src - output_opt = "/fo" + obj - try: - self.spawn([self.rc] + pp_opts + - [output_opt] + [input_opt]) - except DistutilsExecError as msg: - raise CompileError(msg) - 
continue - elif ext in self._mc_extensions: - # Compile .MC to .RC file to .RES file. - # * '-h dir' specifies the directory for the - # generated include file - # * '-r dir' specifies the target directory of the - # generated RC file and the binary message resource - # it includes - # - # For now (since there are no options to change this), - # we use the source-directory for the include file and - # the build directory for the RC file and message - # resources. This works at least for win32all. - h_dir = os.path.dirname(src) - rc_dir = os.path.dirname(obj) - try: - # first compile .MC to .RC and .H file - self.spawn([self.mc] + - ['-h', h_dir, '-r', rc_dir] + [src]) - base, _ = os.path.splitext (os.path.basename (src)) - rc_file = os.path.join (rc_dir, base + '.rc') - # then compile .RC to .RES file - self.spawn([self.rc] + - ["/fo" + obj] + [rc_file]) - - except DistutilsExecError as msg: - raise CompileError(msg) - continue - else: - # how to handle this file? - raise CompileError("Don't know how to compile %s to %s" - % (src, obj)) - - output_opt = "/Fo" + obj - try: - self.spawn([self.cc] + compile_opts + pp_opts + - [input_opt, output_opt] + - extra_postargs) - except DistutilsExecError as msg: - raise CompileError(msg) - - return objects - - - def create_static_lib(self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): - - if not self.initialized: - self.initialize() - (objects, output_dir) = self._fix_object_args(objects, output_dir) - output_filename = self.library_filename(output_libname, - output_dir=output_dir) - - if self._need_link(objects, output_filename): - lib_args = objects + ['/OUT:' + output_filename] - if debug: - pass # XXX what goes here? - try: - self.spawn([self.lib] + lib_args) - except DistutilsExecError as msg: - raise LibError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - - def link(self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - - if not self.initialized: - self.initialize() - (objects, output_dir) = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, - runtime_library_dirs) - (libraries, library_dirs, runtime_library_dirs) = fixed_args - - if runtime_library_dirs: - self.warn ("I don't know what to do with 'runtime_library_dirs': " - + str (runtime_library_dirs)) - - lib_opts = gen_lib_options(self, - library_dirs, runtime_library_dirs, - libraries) - if output_dir is not None: - output_filename = os.path.join(output_dir, output_filename) - - if self._need_link(objects, output_filename): - if target_desc == CCompiler.EXECUTABLE: - if debug: - ldflags = self.ldflags_shared_debug[1:] - else: - ldflags = self.ldflags_shared[1:] - else: - if debug: - ldflags = self.ldflags_shared_debug - else: - ldflags = self.ldflags_shared - - export_opts = [] - for sym in (export_symbols or []): - export_opts.append("/EXPORT:" + sym) - - ld_args = (ldflags + lib_opts + export_opts + - objects + ['/OUT:' + output_filename]) - - # The MSVC linker generates .lib and .exp files, which cannot be - # suppressed by any linker switches. The .lib files may even be - # needed! Make sure they are generated in the temporary build - # directory. Since they have different names for debug and release - # builds, they can go into the same directory. 
- build_temp = os.path.dirname(objects[0]) - if export_symbols is not None: - (dll_name, dll_ext) = os.path.splitext( - os.path.basename(output_filename)) - implib_file = os.path.join( - build_temp, - self.library_filename(dll_name)) - ld_args.append ('/IMPLIB:' + implib_file) - - self.manifest_setup_ldargs(output_filename, build_temp, ld_args) - - if extra_preargs: - ld_args[:0] = extra_preargs - if extra_postargs: - ld_args.extend(extra_postargs) - - self.mkpath(os.path.dirname(output_filename)) - try: - self.spawn([self.linker] + ld_args) - except DistutilsExecError as msg: - raise LinkError(msg) - - # embed the manifest - # XXX - this is somewhat fragile - if mt.exe fails, distutils - # will still consider the DLL up-to-date, but it will not have a - # manifest. Maybe we should link to a temp file? OTOH, that - # implies a build environment error that shouldn't go undetected. - mfinfo = self.manifest_get_embed_info(target_desc, ld_args) - if mfinfo is not None: - mffilename, mfid = mfinfo - out_arg = '-outputresource:%s;%s' % (output_filename, mfid) - try: - self.spawn(['mt.exe', '-nologo', '-manifest', - mffilename, out_arg]) - except DistutilsExecError as msg: - raise LinkError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - def manifest_setup_ldargs(self, output_filename, build_temp, ld_args): - # If we need a manifest at all, an embedded manifest is recommended. - # See MSDN article titled - # "How to: Embed a Manifest Inside a C/C++ Application" - # (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx) - # Ask the linker to generate the manifest in the temp dir, so - # we can check it, and possibly embed it, later. - temp_manifest = os.path.join( - build_temp, - os.path.basename(output_filename) + ".manifest") - ld_args.append('/MANIFESTFILE:' + temp_manifest) - - def manifest_get_embed_info(self, target_desc, ld_args): - # If a manifest should be embedded, return a tuple of - # (manifest_filename, resource_id). Returns None if no manifest - # should be embedded. See http://bugs.python.org/issue7833 for why - # we want to avoid any manifest for extension modules if we can. - for arg in ld_args: - if arg.startswith("/MANIFESTFILE:"): - temp_manifest = arg.split(":", 1)[1] - break - else: - # no /MANIFESTFILE so nothing to do. - return None - if target_desc == CCompiler.EXECUTABLE: - # by default, executables always get the manifest with the - # CRT referenced. - mfid = 1 - else: - # Extension modules try and avoid any manifest if possible. - mfid = 2 - temp_manifest = self._remove_visual_c_ref(temp_manifest) - if temp_manifest is None: - return None - return temp_manifest, mfid - - def _remove_visual_c_ref(self, manifest_file): - try: - # Remove references to the Visual C runtime, so they will - # fall through to the Visual C dependency of Python.exe. - # This way, when installed for a restricted user (e.g. - # runtimes are not in WinSxS folder, but in Python's own - # folder), the runtimes do not need to be in every folder - # with .pyd's. - # Returns either the filename of the modified manifest or - # None if no manifest should be embedded. 
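            # Illustrative annotation (editor's sketch, not part of the removed
            # file): the regexes below strip manifest fragments such as
            #   <assemblyIdentity type="win32" name="Microsoft.VC90.CRT"
            #                     version="9.0.21022.8" ... />
            # plus any <dependentAssembly> element left empty by that removal,
            # so the extension module reuses the CRT that python.exe loads.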
- manifest_f = open(manifest_file) - try: - manifest_buf = manifest_f.read() - finally: - manifest_f.close() - pattern = re.compile( - r"""<assemblyIdentity.*?name=("|')Microsoft\."""\ - r"""VC\d{2}\.CRT("|').*?(/>|</assemblyIdentity>)""", - re.DOTALL) - manifest_buf = re.sub(pattern, "", manifest_buf) - pattern = r"<dependentAssembly>\s*</dependentAssembly>" - manifest_buf = re.sub(pattern, "", manifest_buf) - # Now see if any other assemblies are referenced - if not, we - # don't want a manifest embedded. - pattern = re.compile( - r"""<assemblyIdentity.*?name=(?:"|')(.+?)(?:"|')""" - r""".*?(?:/>|</assemblyIdentity>)""", re.DOTALL) - if re.search(pattern, manifest_buf) is None: - return None - - manifest_f = open(manifest_file, 'w') - try: - manifest_f.write(manifest_buf) - return manifest_file - finally: - manifest_f.close() - except OSError: - pass - - # -- Miscellaneous methods ----------------------------------------- - # These are all used by the 'gen_lib_options() function, in - # ccompiler.py. - - def library_dir_option(self, dir): - return "/LIBPATH:" + dir - - def runtime_library_dir_option(self, dir): - raise DistutilsPlatformError( - "don't know how to set runtime library search path for MSVC++") - - def library_option(self, lib): - return self.library_filename(lib) - - - def find_library_file(self, dirs, lib, debug=0): - # Prefer a debugging library if found (and requested), but deal - # with it if we don't have one. - if debug: - try_names = [lib + "_d", lib] - else: - try_names = [lib] - for dir in dirs: - for name in try_names: - libfile = os.path.join(dir, self.library_filename (name)) - if os.path.exists(libfile): - return libfile - else: - # Oops, didn't find it in *any* of 'dirs' - return None - - # Helper methods for using the MSVC registry settings - - def find_exe(self, exe): - """Return path to an MSVC executable program. - - Tries to find the program in several places: first, one of the - MSVC program search paths from the registry; next, the directories - in the PATH environment variable. If any of those work, return an - absolute path that is known to exist. If none of them work, just - return the original program name, 'exe'. - """ - for p in self.__paths: - fn = os.path.join(os.path.abspath(p), exe) - if os.path.isfile(fn): - return fn - - # didn't find it; try existing path - for p in os.environ['Path'].split(';'): - fn = os.path.join(os.path.abspath(p),exe) - if os.path.isfile(fn): - return fn - - return exe diff --git a/Lib/distutils/msvccompiler.py b/Lib/distutils/msvccompiler.py deleted file mode 100644 index f0d04fdb7f4178..00000000000000 --- a/Lib/distutils/msvccompiler.py +++ /dev/null @@ -1,642 +0,0 @@ -"""distutils.msvccompiler - -Contains MSVCCompiler, an implementation of the abstract CCompiler class -for the Microsoft Visual Studio. 
-""" - -# Written by Perry Stoll -# hacked by Robin Becker and Thomas Heller to do a better job of -# finding DevStudio (through the registry) - -import sys, os -from distutils.errors import \ - DistutilsExecError, DistutilsPlatformError, \ - CompileError, LibError, LinkError -from distutils.ccompiler import \ - CCompiler, gen_lib_options -from distutils import log - -_can_read_reg = False -try: - import winreg - - _can_read_reg = True - hkey_mod = winreg - - RegOpenKeyEx = winreg.OpenKeyEx - RegEnumKey = winreg.EnumKey - RegEnumValue = winreg.EnumValue - RegError = winreg.error - -except ImportError: - try: - import win32api - import win32con - _can_read_reg = True - hkey_mod = win32con - - RegOpenKeyEx = win32api.RegOpenKeyEx - RegEnumKey = win32api.RegEnumKey - RegEnumValue = win32api.RegEnumValue - RegError = win32api.error - except ImportError: - log.info("Warning: Can't read registry to find the " - "necessary compiler setting\n" - "Make sure that Python modules winreg, " - "win32api or win32con are installed.") - -if _can_read_reg: - HKEYS = (hkey_mod.HKEY_USERS, - hkey_mod.HKEY_CURRENT_USER, - hkey_mod.HKEY_LOCAL_MACHINE, - hkey_mod.HKEY_CLASSES_ROOT) - -def read_keys(base, key): - """Return list of registry keys.""" - try: - handle = RegOpenKeyEx(base, key) - except RegError: - return None - L = [] - i = 0 - while True: - try: - k = RegEnumKey(handle, i) - except RegError: - break - L.append(k) - i += 1 - return L - -def read_values(base, key): - """Return dict of registry keys and values. - - All names are converted to lowercase. - """ - try: - handle = RegOpenKeyEx(base, key) - except RegError: - return None - d = {} - i = 0 - while True: - try: - name, value, type = RegEnumValue(handle, i) - except RegError: - break - name = name.lower() - d[convert_mbcs(name)] = convert_mbcs(value) - i += 1 - return d - -def convert_mbcs(s): - dec = getattr(s, "decode", None) - if dec is not None: - try: - s = dec("mbcs") - except UnicodeError: - pass - return s - -class MacroExpander: - def __init__(self, version): - self.macros = {} - self.load_macros(version) - - def set_macro(self, macro, path, key): - for base in HKEYS: - d = read_values(base, path) - if d: - self.macros["$(%s)" % macro] = d[key] - break - - def load_macros(self, version): - vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version - self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir") - self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir") - net = r"Software\Microsoft\.NETFramework" - self.set_macro("FrameworkDir", net, "installroot") - try: - if version > 7.0: - self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1") - else: - self.set_macro("FrameworkSDKDir", net, "sdkinstallroot") - except KeyError as exc: # - raise DistutilsPlatformError( - """Python was built with Visual Studio 2003; -extensions must be built with a compiler than can generate compatible binaries. -Visual Studio 2003 was not found on this system. If you have Cygwin installed, -you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""") - - p = r"Software\Microsoft\NET Framework Setup\Product" - for base in HKEYS: - try: - h = RegOpenKeyEx(base, p) - except RegError: - continue - key = RegEnumKey(h, 0) - d = read_values(base, r"%s\%s" % (p, key)) - self.macros["$(FrameworkVersion)"] = d["version"] - - def sub(self, s): - for k, v in self.macros.items(): - s = s.replace(k, v) - return s - -def get_build_version(): - """Return the version of MSVC that was used to build Python. 
- - For Python 2.3 and up, the version number is included in - sys.version. For earlier versions, assume the compiler is MSVC 6. - """ - prefix = "MSC v." - i = sys.version.find(prefix) - if i == -1: - return 6 - i = i + len(prefix) - s, rest = sys.version[i:].split(" ", 1) - majorVersion = int(s[:-2]) - 6 - if majorVersion >= 13: - # v13 was skipped and should be v14 - majorVersion += 1 - minorVersion = int(s[2:3]) / 10.0 - # I don't think paths are affected by minor version in version 6 - if majorVersion == 6: - minorVersion = 0 - if majorVersion >= 6: - return majorVersion + minorVersion - # else we don't know what version of the compiler this is - return None - -def get_build_architecture(): - """Return the processor architecture. - - Possible results are "Intel" or "AMD64". - """ - - prefix = " bit (" - i = sys.version.find(prefix) - if i == -1: - return "Intel" - j = sys.version.find(")", i) - return sys.version[i+len(prefix):j] - -def normalize_and_reduce_paths(paths): - """Return a list of normalized paths with duplicates removed. - - The current order of paths is maintained. - """ - # Paths are normalized so things like: /a and /a/ aren't both preserved. - reduced_paths = [] - for p in paths: - np = os.path.normpath(p) - # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set. - if np not in reduced_paths: - reduced_paths.append(np) - return reduced_paths - - -class MSVCCompiler(CCompiler) : - """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class.""" - - compiler_type = 'msvc' - - # Just set this so CCompiler's constructor doesn't barf. We currently - # don't use the 'set_executables()' bureaucracy provided by CCompiler, - # as it really isn't necessary for this sort of single-compiler class. - # Would be nice to have a consistent interface with UnixCCompiler, - # though, so it's worth thinking about. - executables = {} - - # Private class data (need to distinguish C from C++ source for compiler) - _c_extensions = ['.c'] - _cpp_extensions = ['.cc', '.cpp', '.cxx'] - _rc_extensions = ['.rc'] - _mc_extensions = ['.mc'] - - # Needed for the filename generation methods provided by the - # base class, CCompiler. - src_extensions = (_c_extensions + _cpp_extensions + - _rc_extensions + _mc_extensions) - res_extension = '.res' - obj_extension = '.obj' - static_lib_extension = '.lib' - shared_lib_extension = '.dll' - static_lib_format = shared_lib_format = '%s%s' - exe_extension = '.exe' - - def __init__(self, verbose=0, dry_run=0, force=0): - CCompiler.__init__ (self, verbose, dry_run, force) - self.__version = get_build_version() - self.__arch = get_build_architecture() - if self.__arch == "Intel": - # x86 - if self.__version >= 7: - self.__root = r"Software\Microsoft\VisualStudio" - self.__macros = MacroExpander(self.__version) - else: - self.__root = r"Software\Microsoft\Devstudio" - self.__product = "Visual Studio version %s" % self.__version - else: - # Win64. 
Assume this was built with the platform SDK - self.__product = "Microsoft SDK compiler %s" % (self.__version + 6) - - self.initialized = False - - def initialize(self): - self.__paths = [] - if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"): - # Assume that the SDK set up everything alright; don't try to be - # smarter - self.cc = "cl.exe" - self.linker = "link.exe" - self.lib = "lib.exe" - self.rc = "rc.exe" - self.mc = "mc.exe" - else: - self.__paths = self.get_msvc_paths("path") - - if len(self.__paths) == 0: - raise DistutilsPlatformError("Python was built with %s, " - "and extensions need to be built with the same " - "version of the compiler, but it isn't installed." - % self.__product) - - self.cc = self.find_exe("cl.exe") - self.linker = self.find_exe("link.exe") - self.lib = self.find_exe("lib.exe") - self.rc = self.find_exe("rc.exe") # resource compiler - self.mc = self.find_exe("mc.exe") # message compiler - self.set_path_env_var('lib') - self.set_path_env_var('include') - - # extend the MSVC path with the current path - try: - for p in os.environ['path'].split(';'): - self.__paths.append(p) - except KeyError: - pass - self.__paths = normalize_and_reduce_paths(self.__paths) - os.environ['path'] = ";".join(self.__paths) - - self.preprocess_options = None - if self.__arch == "Intel": - self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GX' , - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX', - '/Z7', '/D_DEBUG'] - else: - # Win64 - self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GS-' , - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-', - '/Z7', '/D_DEBUG'] - - self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] - if self.__version >= 7: - self.ldflags_shared_debug = [ - '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG' - ] - else: - self.ldflags_shared_debug = [ - '/DLL', '/nologo', '/INCREMENTAL:no', '/pdb:None', '/DEBUG' - ] - self.ldflags_static = [ '/nologo'] - - self.initialized = True - - # -- Worker methods ------------------------------------------------ - - def object_filenames(self, - source_filenames, - strip_dir=0, - output_dir=''): - # Copied from ccompiler.py, extended to return .res as 'object'-file - # for .rc input file - if output_dir is None: output_dir = '' - obj_names = [] - for src_name in source_filenames: - (base, ext) = os.path.splitext (src_name) - base = os.path.splitdrive(base)[1] # Chop off the drive - base = base[os.path.isabs(base):] # If abs, chop off leading / - if ext not in self.src_extensions: - # Better to raise an exception instead of silently continuing - # and later complain about sources and targets having - # different lengths - raise CompileError ("Don't know how to compile %s" % src_name) - if strip_dir: - base = os.path.basename (base) - if ext in self._rc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) - elif ext in self._mc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) - else: - obj_names.append (os.path.join (output_dir, - base + self.obj_extension)) - return obj_names - - - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, - extra_preargs=None, extra_postargs=None, depends=None): - - if not self.initialized: - self.initialize() - compile_info = self._setup_compile(output_dir, macros, include_dirs, - sources, depends, extra_postargs) - macros, objects, extra_postargs, pp_opts, build 
= compile_info - - compile_opts = extra_preargs or [] - compile_opts.append ('/c') - if debug: - compile_opts.extend(self.compile_options_debug) - else: - compile_opts.extend(self.compile_options) - - for obj in objects: - try: - src, ext = build[obj] - except KeyError: - continue - if debug: - # pass the full pathname to MSVC in debug mode, - # this allows the debugger to find the source file - # without asking the user to browse for it - src = os.path.abspath(src) - - if ext in self._c_extensions: - input_opt = "/Tc" + src - elif ext in self._cpp_extensions: - input_opt = "/Tp" + src - elif ext in self._rc_extensions: - # compile .RC to .RES file - input_opt = src - output_opt = "/fo" + obj - try: - self.spawn([self.rc] + pp_opts + - [output_opt] + [input_opt]) - except DistutilsExecError as msg: - raise CompileError(msg) - continue - elif ext in self._mc_extensions: - # Compile .MC to .RC file to .RES file. - # * '-h dir' specifies the directory for the - # generated include file - # * '-r dir' specifies the target directory of the - # generated RC file and the binary message resource - # it includes - # - # For now (since there are no options to change this), - # we use the source-directory for the include file and - # the build directory for the RC file and message - # resources. This works at least for win32all. - h_dir = os.path.dirname(src) - rc_dir = os.path.dirname(obj) - try: - # first compile .MC to .RC and .H file - self.spawn([self.mc] + - ['-h', h_dir, '-r', rc_dir] + [src]) - base, _ = os.path.splitext (os.path.basename (src)) - rc_file = os.path.join (rc_dir, base + '.rc') - # then compile .RC to .RES file - self.spawn([self.rc] + - ["/fo" + obj] + [rc_file]) - - except DistutilsExecError as msg: - raise CompileError(msg) - continue - else: - # how to handle this file? - raise CompileError("Don't know how to compile %s to %s" - % (src, obj)) - - output_opt = "/Fo" + obj - try: - self.spawn([self.cc] + compile_opts + pp_opts + - [input_opt, output_opt] + - extra_postargs) - except DistutilsExecError as msg: - raise CompileError(msg) - - return objects - - - def create_static_lib(self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): - - if not self.initialized: - self.initialize() - (objects, output_dir) = self._fix_object_args(objects, output_dir) - output_filename = self.library_filename(output_libname, - output_dir=output_dir) - - if self._need_link(objects, output_filename): - lib_args = objects + ['/OUT:' + output_filename] - if debug: - pass # XXX what goes here? 
- try: - self.spawn([self.lib] + lib_args) - except DistutilsExecError as msg: - raise LibError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - - def link(self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - - if not self.initialized: - self.initialize() - (objects, output_dir) = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, - runtime_library_dirs) - (libraries, library_dirs, runtime_library_dirs) = fixed_args - - if runtime_library_dirs: - self.warn ("I don't know what to do with 'runtime_library_dirs': " - + str (runtime_library_dirs)) - - lib_opts = gen_lib_options(self, - library_dirs, runtime_library_dirs, - libraries) - if output_dir is not None: - output_filename = os.path.join(output_dir, output_filename) - - if self._need_link(objects, output_filename): - if target_desc == CCompiler.EXECUTABLE: - if debug: - ldflags = self.ldflags_shared_debug[1:] - else: - ldflags = self.ldflags_shared[1:] - else: - if debug: - ldflags = self.ldflags_shared_debug - else: - ldflags = self.ldflags_shared - - export_opts = [] - for sym in (export_symbols or []): - export_opts.append("/EXPORT:" + sym) - - ld_args = (ldflags + lib_opts + export_opts + - objects + ['/OUT:' + output_filename]) - - # The MSVC linker generates .lib and .exp files, which cannot be - # suppressed by any linker switches. The .lib files may even be - # needed! Make sure they are generated in the temporary build - # directory. Since they have different names for debug and release - # builds, they can go into the same directory. - if export_symbols is not None: - (dll_name, dll_ext) = os.path.splitext( - os.path.basename(output_filename)) - implib_file = os.path.join( - os.path.dirname(objects[0]), - self.library_filename(dll_name)) - ld_args.append ('/IMPLIB:' + implib_file) - - if extra_preargs: - ld_args[:0] = extra_preargs - if extra_postargs: - ld_args.extend(extra_postargs) - - self.mkpath(os.path.dirname(output_filename)) - try: - self.spawn([self.linker] + ld_args) - except DistutilsExecError as msg: - raise LinkError(msg) - - else: - log.debug("skipping %s (up-to-date)", output_filename) - - - # -- Miscellaneous methods ----------------------------------------- - # These are all used by the 'gen_lib_options() function, in - # ccompiler.py. - - def library_dir_option(self, dir): - return "/LIBPATH:" + dir - - def runtime_library_dir_option(self, dir): - raise DistutilsPlatformError( - "don't know how to set runtime library search path for MSVC++") - - def library_option(self, lib): - return self.library_filename(lib) - - - def find_library_file(self, dirs, lib, debug=0): - # Prefer a debugging library if found (and requested), but deal - # with it if we don't have one. - if debug: - try_names = [lib + "_d", lib] - else: - try_names = [lib] - for dir in dirs: - for name in try_names: - libfile = os.path.join(dir, self.library_filename (name)) - if os.path.exists(libfile): - return libfile - else: - # Oops, didn't find it in *any* of 'dirs' - return None - - # Helper methods for using the MSVC registry settings - - def find_exe(self, exe): - """Return path to an MSVC executable program. 
- - Tries to find the program in several places: first, one of the - MSVC program search paths from the registry; next, the directories - in the PATH environment variable. If any of those work, return an - absolute path that is known to exist. If none of them work, just - return the original program name, 'exe'. - """ - for p in self.__paths: - fn = os.path.join(os.path.abspath(p), exe) - if os.path.isfile(fn): - return fn - - # didn't find it; try existing path - for p in os.environ['Path'].split(';'): - fn = os.path.join(os.path.abspath(p),exe) - if os.path.isfile(fn): - return fn - - return exe - - def get_msvc_paths(self, path, platform='x86'): - """Get a list of devstudio directories (include, lib or path). - - Return a list of strings. The list will be empty if unable to - access the registry or appropriate registry keys not found. - """ - if not _can_read_reg: - return [] - - path = path + " dirs" - if self.__version >= 7: - key = (r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories" - % (self.__root, self.__version)) - else: - key = (r"%s\6.0\Build System\Components\Platforms" - r"\Win32 (%s)\Directories" % (self.__root, platform)) - - for base in HKEYS: - d = read_values(base, key) - if d: - if self.__version >= 7: - return self.__macros.sub(d[path]).split(";") - else: - return d[path].split(";") - # MSVC 6 seems to create the registry entries we need only when - # the GUI is run. - if self.__version == 6: - for base in HKEYS: - if read_values(base, r"%s\6.0" % self.__root) is not None: - self.warn("It seems you have Visual Studio 6 installed, " - "but the expected registry settings are not present.\n" - "You must at least run the Visual Studio GUI once " - "so that these entries are created.") - break - return [] - - def set_path_env_var(self, name): - """Set environment variable 'name' to an MSVC path type value. - - This is equivalent to a SET command prior to execution of spawned - commands. - """ - - if name == "lib": - p = self.get_msvc_paths("library") - else: - p = self.get_msvc_paths(name) - if p: - os.environ[name] = ';'.join(p) - - -if get_build_version() >= 8.0: - log.debug("Importing new compiler from distutils.msvc9compiler") - OldMSVCCompiler = MSVCCompiler - from distutils.msvc9compiler import MSVCCompiler - # get_build_architecture not really relevant now we support cross-compile - from distutils.msvc9compiler import MacroExpander diff --git a/Lib/distutils/spawn.py b/Lib/distutils/spawn.py deleted file mode 100644 index 31df3f7faca552..00000000000000 --- a/Lib/distutils/spawn.py +++ /dev/null @@ -1,129 +0,0 @@ -"""distutils.spawn - -Provides the 'spawn()' function, a front-end to various platform- -specific functions for launching another program in a sub-process. -Also provides the 'find_executable()' to search the path for a given -executable name. -""" - -import sys -import os -import subprocess - -from distutils.errors import DistutilsPlatformError, DistutilsExecError -from distutils.debug import DEBUG -from distutils import log - - -if sys.platform == 'darwin': - _cfg_target = None - _cfg_target_split = None - - -def spawn(cmd, search_path=1, verbose=0, dry_run=0): - """Run another program, specified as a command list 'cmd', in a new process. - - 'cmd' is just the argument list for the new process, ie. - cmd[0] is the program to run and cmd[1:] are the rest of its arguments. - There is no way to run a program with a name different from that of its - executable. 
- - If 'search_path' is true (the default), the system's executable - search path will be used to find the program; otherwise, cmd[0] - must be the exact path to the executable. If 'dry_run' is true, - the command will not actually be run. - - Raise DistutilsExecError if running the program fails in any way; just - return on success. - """ - # cmd is documented as a list, but just in case some code passes a tuple - # in, protect our %-formatting code against horrible death - cmd = list(cmd) - - log.info(' '.join(cmd)) - if dry_run: - return - - if search_path: - executable = find_executable(cmd[0]) - if executable is not None: - cmd[0] = executable - - env = None - if sys.platform == 'darwin': - global _cfg_target, _cfg_target_split - if _cfg_target is None: - from distutils import sysconfig - _cfg_target = sysconfig.get_config_var( - 'MACOSX_DEPLOYMENT_TARGET') or '' - if _cfg_target: - _cfg_target_split = [int(x) for x in _cfg_target.split('.')] - if _cfg_target: - # Ensure that the deployment target of the build process is not - # less than 10.3 if the interpreter was built for 10.3 or later. - # This ensures extension modules are built with correct - # compatibility values, specifically LDSHARED which can use - # '-undefined dynamic_lookup' which only works on >= 10.3. - cur_target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', _cfg_target) - cur_target_split = [int(x) for x in cur_target.split('.')] - if _cfg_target_split[:2] >= [10, 3] and cur_target_split[:2] < [10, 3]: - my_msg = ('$MACOSX_DEPLOYMENT_TARGET mismatch: ' - 'now "%s" but "%s" during configure;' - 'must use 10.3 or later' - % (cur_target, _cfg_target)) - raise DistutilsPlatformError(my_msg) - env = dict(os.environ, - MACOSX_DEPLOYMENT_TARGET=cur_target) - - try: - proc = subprocess.Popen(cmd, env=env) - proc.wait() - exitcode = proc.returncode - except OSError as exc: - if not DEBUG: - cmd = cmd[0] - raise DistutilsExecError( - "command %r failed: %s" % (cmd, exc.args[-1])) from exc - - if exitcode: - if not DEBUG: - cmd = cmd[0] - raise DistutilsExecError( - "command %r failed with exit code %s" % (cmd, exitcode)) - - -def find_executable(executable, path=None): - """Tries to find 'executable' in the directories listed in 'path'. - - A string listing directories separated by 'os.pathsep'; defaults to - os.environ['PATH']. Returns the complete filename or None if not found. - """ - _, ext = os.path.splitext(executable) - if (sys.platform == 'win32') and (ext != '.exe'): - executable = executable + '.exe' - - if os.path.isfile(executable): - return executable - - if path is None: - path = os.environ.get('PATH', None) - if path is None: - try: - path = os.confstr("CS_PATH") - except (AttributeError, ValueError): - # os.confstr() or CS_PATH is not available - path = os.defpath - # bpo-35755: Don't use os.defpath if the PATH environment variable is - # set to an empty string - - # PATH='' doesn't match, whereas PATH=':' looks in the current directory - if not path: - return None - - paths = path.split(os.pathsep) - for p in paths: - f = os.path.join(p, executable) - if os.path.isfile(f): - # the file exists, we have a shot at spawn working - return f - return None diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py deleted file mode 100644 index 8eada987c13819..00000000000000 --- a/Lib/distutils/sysconfig.py +++ /dev/null @@ -1,345 +0,0 @@ -"""Provide access to Python's configuration information. The specific -configuration variables available depend heavily on the platform and -configuration. 
The values may be retrieved using -get_config_var(name), and the list of variables is available via -get_config_vars().keys(). Additional convenience functions are also -available. - -Written by: Fred L. Drake, Jr. -Email: <fdrake@acm.org> -""" - -import os -import re -import sys -import warnings - -from functools import partial - -from .errors import DistutilsPlatformError - -from sysconfig import ( - _PREFIX as PREFIX, - _BASE_PREFIX as BASE_PREFIX, - _EXEC_PREFIX as EXEC_PREFIX, - _BASE_EXEC_PREFIX as BASE_EXEC_PREFIX, - _PROJECT_BASE as project_base, - _PYTHON_BUILD as python_build, - _init_posix as sysconfig_init_posix, - parse_config_h as sysconfig_parse_config_h, - - _init_non_posix, - - _variable_rx, - _findvar1_rx, - _findvar2_rx, - - expand_makefile_vars, - is_python_build, - get_config_h_filename, - get_config_var, - get_config_vars, - get_makefile_filename, - get_python_version, -) - -# This is better than -# from sysconfig import _CONFIG_VARS as _config_vars -# because it makes sure that the global dictionary is initialized -# which might not be true in the time of import. -_config_vars = get_config_vars() - -warnings.warn( - 'The distutils.sysconfig module is deprecated, use sysconfig instead', - DeprecationWarning, - stacklevel=2 -) - - -# Following functions are the same as in sysconfig but with different API -def parse_config_h(fp, g=None): - return sysconfig_parse_config_h(fp, vars=g) - - -_python_build = partial(is_python_build, check_home=True) -_init_posix = partial(sysconfig_init_posix, _config_vars) -_init_nt = partial(_init_non_posix, _config_vars) - - -# Similar function is also implemented in sysconfig as _parse_makefile -# but without the parsing capabilities of distutils.text_file.TextFile. -def parse_makefile(fn, g=None): - """Parse a Makefile-style file. - A dictionary containing name/value pairs is returned. If an - optional dictionary is passed in as the second argument, it is - used instead of a new dictionary. - """ - from distutils.text_file import TextFile - fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1, errors="surrogateescape") - - if g is None: - g = {} - done = {} - notdone = {} - - while True: - line = fp.readline() - if line is None: # eof - break - m = re.match(_variable_rx, line) - if m: - n, v = m.group(1, 2) - v = v.strip() - # `$$' is a literal `$' in make - tmpv = v.replace('$$', '') - - if "$" in tmpv: - notdone[n] = v - else: - try: - v = int(v) - except ValueError: - # insert literal `$' - done[n] = v.replace('$$', '$') - else: - done[n] = v - - # Variables with a 'PY_' prefix in the makefile. These need to - # be made available without that prefix through sysconfig. - # Special care is needed to ensure that variable expansion works, even - # if the expansion uses the name without a prefix. 
- renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS') - - # do variable interpolation here - while notdone: - for name in list(notdone): - value = notdone[name] - m = re.search(_findvar1_rx, value) or re.search(_findvar2_rx, value) - if m: - n = m.group(1) - found = True - if n in done: - item = str(done[n]) - elif n in notdone: - # get it on a subsequent round - found = False - elif n in os.environ: - # do it like make: fall back to environment - item = os.environ[n] - - elif n in renamed_variables: - if name.startswith('PY_') and name[3:] in renamed_variables: - item = "" - - elif 'PY_' + n in notdone: - found = False - - else: - item = str(done['PY_' + n]) - else: - done[n] = item = "" - if found: - after = value[m.end():] - value = value[:m.start()] + item + after - if "$" in after: - notdone[name] = value - else: - try: value = int(value) - except ValueError: - done[name] = value.strip() - else: - done[name] = value - del notdone[name] - - if name.startswith('PY_') \ - and name[3:] in renamed_variables: - - name = name[3:] - if name not in done: - done[name] = value - else: - # bogus variable reference; just drop it since we can't deal - del notdone[name] - - fp.close() - - # strip spurious spaces - for k, v in done.items(): - if isinstance(v, str): - done[k] = v.strip() - - # save the results in the global dictionary - g.update(done) - return g - - -# Following functions are deprecated together with this module and they -# have no direct replacement - -# Calculate the build qualifier flags if they are defined. Adding the flags -# to the include and lib directories only makes sense for an installation, not -# an in-source build. -build_flags = '' -try: - if not python_build: - build_flags = sys.abiflags -except AttributeError: - # It's not a configure-based build, so the sys module doesn't have - # this attribute, which is fine. - pass - - -def customize_compiler(compiler): - """Do any platform-specific customization of a CCompiler instance. - - Mainly needed on Unix, so we can plug in the information that - varies across Unices and is stored in Python's Makefile. - """ - if compiler.compiler_type == "unix": - if sys.platform == "darwin": - # Perform first-time customization of compiler-related - # config vars on OS X now that we know we need a compiler. - # This is primarily to support Pythons from binary - # installers. The kind and paths to build tools on - # the user system may vary significantly from the system - # that Python itself was built on. Also the user OS - # version and build tools may not support the same set - # of CPU architectures for universal builds. 
- if not _config_vars.get('CUSTOMIZED_OSX_COMPILER'): - import _osx_support - _osx_support.customize_compiler(_config_vars) - _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' - - (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \ - get_config_vars('CC', 'CXX', 'CFLAGS', - 'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') - - if 'CC' in os.environ: - newcc = os.environ['CC'] - if (sys.platform == 'darwin' - and 'LDSHARED' not in os.environ - and ldshared.startswith(cc)): - # On OS X, if CC is overridden, use that as the default - # command for LDSHARED as well - ldshared = newcc + ldshared[len(cc):] - cc = newcc - if 'CXX' in os.environ: - cxx = os.environ['CXX'] - if 'LDSHARED' in os.environ: - ldshared = os.environ['LDSHARED'] - if 'CPP' in os.environ: - cpp = os.environ['CPP'] - else: - cpp = cc + " -E" # not always - if 'LDFLAGS' in os.environ: - ldshared = ldshared + ' ' + os.environ['LDFLAGS'] - if 'CFLAGS' in os.environ: - cflags = cflags + ' ' + os.environ['CFLAGS'] - ldshared = ldshared + ' ' + os.environ['CFLAGS'] - if 'CPPFLAGS' in os.environ: - cpp = cpp + ' ' + os.environ['CPPFLAGS'] - cflags = cflags + ' ' + os.environ['CPPFLAGS'] - ldshared = ldshared + ' ' + os.environ['CPPFLAGS'] - if 'AR' in os.environ: - ar = os.environ['AR'] - if 'ARFLAGS' in os.environ: - archiver = ar + ' ' + os.environ['ARFLAGS'] - else: - archiver = ar + ' ' + ar_flags - - cc_cmd = cc + ' ' + cflags - compiler.set_executables( - preprocessor=cpp, - compiler=cc_cmd, - compiler_so=cc_cmd + ' ' + ccshared, - compiler_cxx=cxx, - linker_so=ldshared, - linker_exe=cc, - archiver=archiver) - - compiler.shared_lib_extension = shlib_suffix - - -def get_python_inc(plat_specific=0, prefix=None): - """Return the directory containing installed Python header files. - - If 'plat_specific' is false (the default), this is the path to the - non-platform-specific header files, i.e. Python.h and so on; - otherwise, this is the path to platform-specific header files - (namely pyconfig.h). - - If 'prefix' is supplied, use it instead of sys.base_prefix or - sys.base_exec_prefix -- i.e., ignore 'plat_specific'. - """ - if prefix is None: - prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX - if os.name == "posix": - if python_build: - # Assume the executable is in the build directory. The - # pyconfig.h file should be in the same directory. Since - # the build directory may not be the source directory, we - # must use "srcdir" from the makefile to find the "Include" - # directory. - if plat_specific: - return project_base - else: - incdir = os.path.join(get_config_var('srcdir'), 'Include') - return os.path.normpath(incdir) - python_dir = 'python' + get_python_version() + build_flags - return os.path.join(prefix, "include", python_dir) - elif os.name == "nt": - if python_build: - # Include both the include and PC dir to ensure we can find - # pyconfig.h - return (os.path.join(prefix, "include") + os.path.pathsep + - os.path.join(prefix, "PC")) - return os.path.join(prefix, "include") - else: - raise DistutilsPlatformError( - "I don't know where Python installs its C header files " - "on platform '%s'" % os.name) - - -def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): - """Return the directory containing the Python library (standard or - site additions). - - If 'plat_specific' is true, return the directory containing - platform-specific modules, i.e. any module from a non-pure-Python - module distribution; otherwise, return the platform-shared library - directory. 
If 'standard_lib' is true, return the directory - containing standard Python library modules; otherwise, return the - directory for site-specific modules. - - If 'prefix' is supplied, use it instead of sys.base_prefix or - sys.base_exec_prefix -- i.e., ignore 'plat_specific'. - """ - if prefix is None: - if standard_lib: - prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX - else: - prefix = plat_specific and EXEC_PREFIX or PREFIX - - if os.name == "posix": - if plat_specific or standard_lib: - # Platform-specific modules (any module from a non-pure-Python - # module distribution) or standard Python library modules. - libdir = sys.platlibdir - else: - # Pure Python - libdir = "lib" - libpython = os.path.join(prefix, libdir, - "python" + get_python_version()) - if standard_lib: - return libpython - else: - return os.path.join(libpython, "site-packages") - elif os.name == "nt": - if standard_lib: - return os.path.join(prefix, "Lib") - else: - return os.path.join(prefix, "Lib", "site-packages") - else: - raise DistutilsPlatformError( - "I don't know where Python installs its library " - "on platform '%s'" % os.name) diff --git a/Lib/distutils/tests/Setup.sample b/Lib/distutils/tests/Setup.sample deleted file mode 100644 index 36c4290d8ff482..00000000000000 --- a/Lib/distutils/tests/Setup.sample +++ /dev/null @@ -1,67 +0,0 @@ -# Setup file from the pygame project - -#--StartConfig -SDL = -I/usr/include/SDL -D_REENTRANT -lSDL -FONT = -lSDL_ttf -IMAGE = -lSDL_image -MIXER = -lSDL_mixer -SMPEG = -lsmpeg -PNG = -lpng -JPEG = -ljpeg -SCRAP = -lX11 -PORTMIDI = -lportmidi -PORTTIME = -lporttime -#--EndConfig - -#DEBUG = -C-W -C-Wall -DEBUG = - -#the following modules are optional. you will want to compile -#everything you can, but you can ignore ones you don't have -#dependencies for, just comment them out - -imageext src/imageext.c $(SDL) $(IMAGE) $(PNG) $(JPEG) $(DEBUG) -font src/font.c $(SDL) $(FONT) $(DEBUG) -mixer src/mixer.c $(SDL) $(MIXER) $(DEBUG) -mixer_music src/music.c $(SDL) $(MIXER) $(DEBUG) -_numericsurfarray src/_numericsurfarray.c $(SDL) $(DEBUG) -_numericsndarray src/_numericsndarray.c $(SDL) $(MIXER) $(DEBUG) -movie src/movie.c $(SDL) $(SMPEG) $(DEBUG) -scrap src/scrap.c $(SDL) $(SCRAP) $(DEBUG) -_camera src/_camera.c src/camera_v4l2.c src/camera_v4l.c $(SDL) $(DEBUG) -pypm src/pypm.c $(SDL) $(PORTMIDI) $(PORTTIME) $(DEBUG) - -GFX = src/SDL_gfx/SDL_gfxPrimitives.c -#GFX = src/SDL_gfx/SDL_gfxBlitFunc.c src/SDL_gfx/SDL_gfxPrimitives.c -gfxdraw src/gfxdraw.c $(SDL) $(GFX) $(DEBUG) - - - -#these modules are required for pygame to run. they only require -#SDL as a dependency. 
these should not be altered - -base src/base.c $(SDL) $(DEBUG) -cdrom src/cdrom.c $(SDL) $(DEBUG) -color src/color.c $(SDL) $(DEBUG) -constants src/constants.c $(SDL) $(DEBUG) -display src/display.c $(SDL) $(DEBUG) -event src/event.c $(SDL) $(DEBUG) -fastevent src/fastevent.c src/fastevents.c $(SDL) $(DEBUG) -key src/key.c $(SDL) $(DEBUG) -mouse src/mouse.c $(SDL) $(DEBUG) -rect src/rect.c $(SDL) $(DEBUG) -rwobject src/rwobject.c $(SDL) $(DEBUG) -surface src/surface.c src/alphablit.c src/surface_fill.c $(SDL) $(DEBUG) -surflock src/surflock.c $(SDL) $(DEBUG) -time src/time.c $(SDL) $(DEBUG) -joystick src/joystick.c $(SDL) $(DEBUG) -draw src/draw.c $(SDL) $(DEBUG) -image src/image.c $(SDL) $(DEBUG) -overlay src/overlay.c $(SDL) $(DEBUG) -transform src/transform.c src/rotozoom.c src/scale2x.c src/scale_mmx.c $(SDL) $(DEBUG) -mask src/mask.c src/bitmask.c $(SDL) $(DEBUG) -bufferproxy src/bufferproxy.c $(SDL) $(DEBUG) -pixelarray src/pixelarray.c $(SDL) $(DEBUG) -_arraysurfarray src/_arraysurfarray.c $(SDL) $(DEBUG) - - diff --git a/Lib/distutils/tests/__init__.py b/Lib/distutils/tests/__init__.py deleted file mode 100644 index 16d011fd9ee6e7..00000000000000 --- a/Lib/distutils/tests/__init__.py +++ /dev/null @@ -1,41 +0,0 @@ -"""Test suite for distutils. - -This test suite consists of a collection of test modules in the -distutils.tests package. Each test module has a name starting with -'test' and contains a function test_suite(). The function is expected -to return an initialized unittest.TestSuite instance. - -Tests for the command classes in the distutils.command package are -included in distutils.tests as well, instead of using a separate -distutils.command.tests package, since command identification is done -by import rather than matching pre-defined names. - -""" - -import os -import sys -import unittest -from test.support import run_unittest -from test.support.warnings_helper import save_restore_warnings_filters - - -here = os.path.dirname(__file__) or os.curdir - - -def test_suite(): - suite = unittest.TestSuite() - for fn in os.listdir(here): - if fn.startswith("test") and fn.endswith(".py"): - modname = "distutils.tests." + fn[:-3] - # bpo-40055: Save/restore warnings filters to leave them unchanged. - # Importing tests imports docutils which imports pkg_resources - # which adds a warnings filter. - with save_restore_warnings_filters(): - __import__(modname) - module = sys.modules[modname] - suite.addTest(module.test_suite()) - return suite - - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/includetest.rst b/Lib/distutils/tests/includetest.rst deleted file mode 100644 index d7b4ae38b09d86..00000000000000 --- a/Lib/distutils/tests/includetest.rst +++ /dev/null @@ -1 +0,0 @@ -This should be included. 
diff --git a/Lib/distutils/tests/support.py b/Lib/distutils/tests/support.py deleted file mode 100644 index 23b907b607efad..00000000000000 --- a/Lib/distutils/tests/support.py +++ /dev/null @@ -1,209 +0,0 @@ -"""Support code for distutils test cases.""" -import os -import sys -import shutil -import tempfile -import unittest -import sysconfig -from copy import deepcopy -from test.support import os_helper - -from distutils import log -from distutils.log import DEBUG, INFO, WARN, ERROR, FATAL -from distutils.core import Distribution - - -class LoggingSilencer(object): - - def setUp(self): - super().setUp() - self.threshold = log.set_threshold(log.FATAL) - # catching warnings - # when log will be replaced by logging - # we won't need such monkey-patch anymore - self._old_log = log.Log._log - log.Log._log = self._log - self.logs = [] - - def tearDown(self): - log.set_threshold(self.threshold) - log.Log._log = self._old_log - super().tearDown() - - def _log(self, level, msg, args): - if level not in (DEBUG, INFO, WARN, ERROR, FATAL): - raise ValueError('%s wrong log level' % str(level)) - if not isinstance(msg, str): - raise TypeError("msg should be str, not '%.200s'" - % (type(msg).__name__)) - self.logs.append((level, msg, args)) - - def get_logs(self, *levels): - return [msg % args for level, msg, args - in self.logs if level in levels] - - def clear_logs(self): - self.logs = [] - - -class TempdirManager(object): - """Mix-in class that handles temporary directories for test cases. - - This is intended to be used with unittest.TestCase. - """ - - def setUp(self): - super().setUp() - self.old_cwd = os.getcwd() - self.tempdirs = [] - - def tearDown(self): - # Restore working dir, for Solaris and derivatives, where rmdir() - # on the current directory fails. - os.chdir(self.old_cwd) - super().tearDown() - while self.tempdirs: - tmpdir = self.tempdirs.pop() - os_helper.rmtree(tmpdir) - - def mkdtemp(self): - """Create a temporary directory that will be cleaned up. - - Returns the path of the directory. - """ - d = tempfile.mkdtemp() - self.tempdirs.append(d) - return d - - def write_file(self, path, content='xxx'): - """Writes a file in the given path. - - - path can be a string or a sequence. - """ - if isinstance(path, (list, tuple)): - path = os.path.join(*path) - f = open(path, 'w') - try: - f.write(content) - finally: - f.close() - - def create_dist(self, pkg_name='foo', **kw): - """Will generate a test environment. - - This function creates: - - a Distribution instance using keywords - - a temporary directory with a package structure - - It returns the package directory and the distribution - instance. - """ - tmp_dir = self.mkdtemp() - pkg_dir = os.path.join(tmp_dir, pkg_name) - os.mkdir(pkg_dir) - dist = Distribution(attrs=kw) - - return pkg_dir, dist - - -class DummyCommand: - """Class to store options for retrieval via set_undefined_options().""" - - def __init__(self, **kwargs): - for kw, val in kwargs.items(): - setattr(self, kw, val) - - def ensure_finalized(self): - pass - - -class EnvironGuard(object): - - def setUp(self): - super(EnvironGuard, self).setUp() - self.old_environ = deepcopy(os.environ) - - def tearDown(self): - for key, value in self.old_environ.items(): - if os.environ.get(key) != value: - os.environ[key] = value - - for key in tuple(os.environ.keys()): - if key not in self.old_environ: - del os.environ[key] - - super(EnvironGuard, self).tearDown() - - -def copy_xxmodule_c(directory): - """Helper for tests that need the xxmodule.c source file. 
- - Example use: - - def test_compile(self): - copy_xxmodule_c(self.tmpdir) - self.assertIn('xxmodule.c', os.listdir(self.tmpdir)) - - If the source file can be found, it will be copied to *directory*. If not, - the test will be skipped. Errors during copy are not caught. - """ - filename = _get_xxmodule_path() - if filename is None: - raise unittest.SkipTest('cannot find xxmodule.c (test must run in ' - 'the python build dir)') - shutil.copy(filename, directory) - - -def _get_xxmodule_path(): - srcdir = sysconfig.get_config_var('srcdir') - candidates = [ - # use installed copy if available - os.path.join(os.path.dirname(__file__), 'xxmodule.c'), - # otherwise try using copy from build directory - os.path.join(srcdir, 'Modules', 'xxmodule.c'), - # srcdir mysteriously can be $srcdir/Lib/distutils/tests when - # this file is run from its parent directory, so walk up the - # tree to find the real srcdir - os.path.join(srcdir, '..', '..', '..', 'Modules', 'xxmodule.c'), - ] - for path in candidates: - if os.path.exists(path): - return path - - -def fixup_build_ext(cmd): - """Function needed to make build_ext tests pass. - - When Python was built with --enable-shared on Unix, -L. is not enough to - find libpython<blah>.so, because regrtest runs in a tempdir, not in the - source directory where the .so lives. - - When Python was built with in debug mode on Windows, build_ext commands - need their debug attribute set, and it is not done automatically for - some reason. - - This function handles both of these things. Example use: - - cmd = build_ext(dist) - support.fixup_build_ext(cmd) - cmd.ensure_finalized() - - Unlike most other Unix platforms, Mac OS X embeds absolute paths - to shared libraries into executables, so the fixup is not needed there. - """ - if os.name == 'nt': - cmd.debug = sys.executable.endswith('_d.exe') - elif sysconfig.get_config_var('Py_ENABLE_SHARED'): - # To further add to the shared builds fun on Unix, we can't just add - # library_dirs to the Extension() instance because that doesn't get - # plumbed through to the final compiler command. 
- runshared = sysconfig.get_config_var('RUNSHARED') - if runshared is None: - cmd.library_dirs = ['.'] - else: - if sys.platform == 'darwin': - cmd.library_dirs = [] - else: - name, equals, value = runshared.partition('=') - cmd.library_dirs = [d for d in value.split(os.pathsep) if d] diff --git a/Lib/distutils/tests/test_archive_util.py b/Lib/distutils/tests/test_archive_util.py deleted file mode 100644 index 8aec84078ed48f..00000000000000 --- a/Lib/distutils/tests/test_archive_util.py +++ /dev/null @@ -1,396 +0,0 @@ -# -*- coding: utf-8 -*- -"""Tests for distutils.archive_util.""" -import unittest -import os -import sys -import tarfile -from os.path import splitdrive -import warnings - -from distutils import archive_util -from distutils.archive_util import (check_archive_formats, make_tarball, - make_zipfile, make_archive, - ARCHIVE_FORMATS) -from distutils.spawn import find_executable, spawn -from distutils.tests import support -from test.support import run_unittest, patch -from test.support.os_helper import change_cwd -from test.support.warnings_helper import check_warnings - -try: - import grp - import pwd - UID_GID_SUPPORT = True -except ImportError: - UID_GID_SUPPORT = False - -try: - import zipfile - ZIP_SUPPORT = True -except ImportError: - ZIP_SUPPORT = find_executable('zip') - -try: - import zlib - ZLIB_SUPPORT = True -except ImportError: - ZLIB_SUPPORT = False - -try: - import bz2 -except ImportError: - bz2 = None - -try: - import lzma -except ImportError: - lzma = None - -def can_fs_encode(filename): - """ - Return True if the filename can be saved in the file system. - """ - if os.path.supports_unicode_filenames: - return True - try: - filename.encode(sys.getfilesystemencoding()) - except UnicodeEncodeError: - return False - return True - - -class ArchiveUtilTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_make_tarball(self, name='archive'): - # creating something to tar - tmpdir = self._create_files() - self._make_tarball(tmpdir, name, '.tar.gz') - # trying an uncompressed one - self._make_tarball(tmpdir, name, '.tar', compress=None) - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_make_tarball_gzip(self): - tmpdir = self._create_files() - self._make_tarball(tmpdir, 'archive', '.tar.gz', compress='gzip') - - @unittest.skipUnless(bz2, 'Need bz2 support to run') - def test_make_tarball_bzip2(self): - tmpdir = self._create_files() - self._make_tarball(tmpdir, 'archive', '.tar.bz2', compress='bzip2') - - @unittest.skipUnless(lzma, 'Need lzma support to run') - def test_make_tarball_xz(self): - tmpdir = self._create_files() - self._make_tarball(tmpdir, 'archive', '.tar.xz', compress='xz') - - @unittest.skipUnless(can_fs_encode('årchiv'), - 'File system cannot handle this filename') - def test_make_tarball_latin1(self): - """ - Mirror test_make_tarball, except filename contains latin characters. - """ - self.test_make_tarball('årchiv') # note this isn't a real word - - @unittest.skipUnless(can_fs_encode('のアーカイブ'), - 'File system cannot handle this filename') - def test_make_tarball_extended(self): - """ - Mirror test_make_tarball, except filename contains extended - characters outside the latin charset. 
- """ - self.test_make_tarball('のアーカイブ') # japanese for archive - - def _make_tarball(self, tmpdir, target_name, suffix, **kwargs): - tmpdir2 = self.mkdtemp() - unittest.skipUnless(splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0], - "source and target should be on same drive") - - base_name = os.path.join(tmpdir2, target_name) - - # working with relative paths to avoid tar warnings - with change_cwd(tmpdir): - make_tarball(splitdrive(base_name)[1], 'dist', **kwargs) - - # check if the compressed tarball was created - tarball = base_name + suffix - self.assertTrue(os.path.exists(tarball)) - self.assertEqual(self._tarinfo(tarball), self._created_files) - - def _tarinfo(self, path): - tar = tarfile.open(path) - try: - names = tar.getnames() - names.sort() - return names - finally: - tar.close() - - _zip_created_files = ['dist/', 'dist/file1', 'dist/file2', - 'dist/sub/', 'dist/sub/file3', 'dist/sub2/'] - _created_files = [p.rstrip('/') for p in _zip_created_files] - - def _create_files(self): - # creating something to tar - tmpdir = self.mkdtemp() - dist = os.path.join(tmpdir, 'dist') - os.mkdir(dist) - self.write_file([dist, 'file1'], 'xxx') - self.write_file([dist, 'file2'], 'xxx') - os.mkdir(os.path.join(dist, 'sub')) - self.write_file([dist, 'sub', 'file3'], 'xxx') - os.mkdir(os.path.join(dist, 'sub2')) - return tmpdir - - @unittest.skipUnless(find_executable('tar') and find_executable('gzip') - and ZLIB_SUPPORT, - 'Need the tar, gzip and zlib command to run') - def test_tarfile_vs_tar(self): - tmpdir = self._create_files() - tmpdir2 = self.mkdtemp() - base_name = os.path.join(tmpdir2, 'archive') - old_dir = os.getcwd() - os.chdir(tmpdir) - try: - make_tarball(base_name, 'dist') - finally: - os.chdir(old_dir) - - # check if the compressed tarball was created - tarball = base_name + '.tar.gz' - self.assertTrue(os.path.exists(tarball)) - - # now create another tarball using `tar` - tarball2 = os.path.join(tmpdir, 'archive2.tar.gz') - tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist'] - gzip_cmd = ['gzip', '-f', '-9', 'archive2.tar'] - old_dir = os.getcwd() - os.chdir(tmpdir) - try: - spawn(tar_cmd) - spawn(gzip_cmd) - finally: - os.chdir(old_dir) - - self.assertTrue(os.path.exists(tarball2)) - # let's compare both tarballs - self.assertEqual(self._tarinfo(tarball), self._created_files) - self.assertEqual(self._tarinfo(tarball2), self._created_files) - - # trying an uncompressed one - base_name = os.path.join(tmpdir2, 'archive') - old_dir = os.getcwd() - os.chdir(tmpdir) - try: - make_tarball(base_name, 'dist', compress=None) - finally: - os.chdir(old_dir) - tarball = base_name + '.tar' - self.assertTrue(os.path.exists(tarball)) - - # now for a dry_run - base_name = os.path.join(tmpdir2, 'archive') - old_dir = os.getcwd() - os.chdir(tmpdir) - try: - make_tarball(base_name, 'dist', compress=None, dry_run=True) - finally: - os.chdir(old_dir) - tarball = base_name + '.tar' - self.assertTrue(os.path.exists(tarball)) - - @unittest.skipUnless(find_executable('compress'), - 'The compress program is required') - def test_compress_deprecated(self): - tmpdir = self._create_files() - base_name = os.path.join(self.mkdtemp(), 'archive') - - # using compress and testing the PendingDeprecationWarning - old_dir = os.getcwd() - os.chdir(tmpdir) - try: - with check_warnings() as w: - warnings.simplefilter("always") - make_tarball(base_name, 'dist', compress='compress') - finally: - os.chdir(old_dir) - tarball = base_name + '.tar.Z' - self.assertTrue(os.path.exists(tarball)) - self.assertEqual(len(w.warnings), 1) 
- - # same test with dry_run - os.remove(tarball) - old_dir = os.getcwd() - os.chdir(tmpdir) - try: - with check_warnings() as w: - warnings.simplefilter("always") - make_tarball(base_name, 'dist', compress='compress', - dry_run=True) - finally: - os.chdir(old_dir) - self.assertFalse(os.path.exists(tarball)) - self.assertEqual(len(w.warnings), 1) - - @unittest.skipUnless(ZIP_SUPPORT and ZLIB_SUPPORT, - 'Need zip and zlib support to run') - def test_make_zipfile(self): - # creating something to tar - tmpdir = self._create_files() - base_name = os.path.join(self.mkdtemp(), 'archive') - with change_cwd(tmpdir): - make_zipfile(base_name, 'dist') - - # check if the compressed tarball was created - tarball = base_name + '.zip' - self.assertTrue(os.path.exists(tarball)) - with zipfile.ZipFile(tarball) as zf: - self.assertEqual(sorted(zf.namelist()), self._zip_created_files) - - @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run') - def test_make_zipfile_no_zlib(self): - patch(self, archive_util.zipfile, 'zlib', None) # force zlib ImportError - - called = [] - zipfile_class = zipfile.ZipFile - def fake_zipfile(*a, **kw): - if kw.get('compression', None) == zipfile.ZIP_STORED: - called.append((a, kw)) - return zipfile_class(*a, **kw) - - patch(self, archive_util.zipfile, 'ZipFile', fake_zipfile) - - # create something to tar and compress - tmpdir = self._create_files() - base_name = os.path.join(self.mkdtemp(), 'archive') - with change_cwd(tmpdir): - make_zipfile(base_name, 'dist') - - tarball = base_name + '.zip' - self.assertEqual(called, - [((tarball, "w"), {'compression': zipfile.ZIP_STORED})]) - self.assertTrue(os.path.exists(tarball)) - with zipfile.ZipFile(tarball) as zf: - self.assertEqual(sorted(zf.namelist()), self._zip_created_files) - - def test_check_archive_formats(self): - self.assertEqual(check_archive_formats(['gztar', 'xxx', 'zip']), - 'xxx') - self.assertIsNone(check_archive_formats(['gztar', 'bztar', 'xztar', - 'ztar', 'tar', 'zip'])) - - def test_make_archive(self): - tmpdir = self.mkdtemp() - base_name = os.path.join(tmpdir, 'archive') - self.assertRaises(ValueError, make_archive, base_name, 'xxx') - - def test_make_archive_cwd(self): - current_dir = os.getcwd() - def _breaks(*args, **kw): - raise RuntimeError() - ARCHIVE_FORMATS['xxx'] = (_breaks, [], 'xxx file') - try: - try: - make_archive('xxx', 'xxx', root_dir=self.mkdtemp()) - except: - pass - self.assertEqual(os.getcwd(), current_dir) - finally: - del ARCHIVE_FORMATS['xxx'] - - def test_make_archive_tar(self): - base_dir = self._create_files() - base_name = os.path.join(self.mkdtemp() , 'archive') - res = make_archive(base_name, 'tar', base_dir, 'dist') - self.assertTrue(os.path.exists(res)) - self.assertEqual(os.path.basename(res), 'archive.tar') - self.assertEqual(self._tarinfo(res), self._created_files) - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_make_archive_gztar(self): - base_dir = self._create_files() - base_name = os.path.join(self.mkdtemp() , 'archive') - res = make_archive(base_name, 'gztar', base_dir, 'dist') - self.assertTrue(os.path.exists(res)) - self.assertEqual(os.path.basename(res), 'archive.tar.gz') - self.assertEqual(self._tarinfo(res), self._created_files) - - @unittest.skipUnless(bz2, 'Need bz2 support to run') - def test_make_archive_bztar(self): - base_dir = self._create_files() - base_name = os.path.join(self.mkdtemp() , 'archive') - res = make_archive(base_name, 'bztar', base_dir, 'dist') - self.assertTrue(os.path.exists(res)) - 
self.assertEqual(os.path.basename(res), 'archive.tar.bz2') - self.assertEqual(self._tarinfo(res), self._created_files) - - @unittest.skipUnless(lzma, 'Need xz support to run') - def test_make_archive_xztar(self): - base_dir = self._create_files() - base_name = os.path.join(self.mkdtemp() , 'archive') - res = make_archive(base_name, 'xztar', base_dir, 'dist') - self.assertTrue(os.path.exists(res)) - self.assertEqual(os.path.basename(res), 'archive.tar.xz') - self.assertEqual(self._tarinfo(res), self._created_files) - - def test_make_archive_owner_group(self): - # testing make_archive with owner and group, with various combinations - # this works even if there's not gid/uid support - if UID_GID_SUPPORT: - group = grp.getgrgid(0)[0] - owner = pwd.getpwuid(0)[0] - else: - group = owner = 'root' - - base_dir = self._create_files() - root_dir = self.mkdtemp() - base_name = os.path.join(self.mkdtemp() , 'archive') - res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner, - group=group) - self.assertTrue(os.path.exists(res)) - - res = make_archive(base_name, 'zip', root_dir, base_dir) - self.assertTrue(os.path.exists(res)) - - res = make_archive(base_name, 'tar', root_dir, base_dir, - owner=owner, group=group) - self.assertTrue(os.path.exists(res)) - - res = make_archive(base_name, 'tar', root_dir, base_dir, - owner='kjhkjhkjg', group='oihohoh') - self.assertTrue(os.path.exists(res)) - - @unittest.skipUnless(ZLIB_SUPPORT, "Requires zlib") - @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support") - def test_tarfile_root_owner(self): - tmpdir = self._create_files() - base_name = os.path.join(self.mkdtemp(), 'archive') - old_dir = os.getcwd() - os.chdir(tmpdir) - group = grp.getgrgid(0)[0] - owner = pwd.getpwuid(0)[0] - try: - archive_name = make_tarball(base_name, 'dist', compress=None, - owner=owner, group=group) - finally: - os.chdir(old_dir) - - # check if the compressed tarball was created - self.assertTrue(os.path.exists(archive_name)) - - # now checks the rights - archive = tarfile.open(archive_name) - try: - for member in archive.getmembers(): - self.assertEqual(member.uid, 0) - self.assertEqual(member.gid, 0) - finally: - archive.close() - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(ArchiveUtilTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_bdist.py b/Lib/distutils/tests/test_bdist.py deleted file mode 100644 index 5676f7f34d4292..00000000000000 --- a/Lib/distutils/tests/test_bdist.py +++ /dev/null @@ -1,51 +0,0 @@ -"""Tests for distutils.command.bdist.""" -import unittest -from test.support import run_unittest - -import warnings -with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - from distutils.command.bdist import bdist - from distutils.tests import support - - -class BuildTestCase(support.TempdirManager, - unittest.TestCase): - - def test_formats(self): - # let's create a command and make sure - # we can set the format - dist = self.create_dist()[1] - cmd = bdist(dist) - cmd.formats = ['tar'] - cmd.ensure_finalized() - self.assertEqual(cmd.formats, ['tar']) - - # what formats does bdist offer? 
- formats = ['bztar', 'gztar', 'rpm', 'tar', 'xztar', 'zip', 'ztar'] - found = sorted(cmd.format_command) - self.assertEqual(found, formats) - - def test_skip_build(self): - # bug #10946: bdist --skip-build should trickle down to subcommands - dist = self.create_dist()[1] - cmd = bdist(dist) - cmd.skip_build = 1 - cmd.ensure_finalized() - dist.command_obj['bdist'] = cmd - - for name in ['bdist_dumb']: # bdist_rpm does not support --skip-build - subcmd = cmd.get_finalized_command(name) - if getattr(subcmd, '_unsupported', False): - # command is not supported on this build - continue - self.assertTrue(subcmd.skip_build, - '%s should take --skip-build from bdist' % name) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(BuildTestCase) - - -if __name__ == '__main__': - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_bdist_dumb.py b/Lib/distutils/tests/test_bdist_dumb.py deleted file mode 100644 index bb860c8ac70345..00000000000000 --- a/Lib/distutils/tests/test_bdist_dumb.py +++ /dev/null @@ -1,97 +0,0 @@ -"""Tests for distutils.command.bdist_dumb.""" - -import os -import sys -import zipfile -import unittest -from test.support import run_unittest - -from distutils.core import Distribution -from distutils.command.bdist_dumb import bdist_dumb -from distutils.tests import support - -SETUP_PY = """\ -from distutils.core import setup -import foo - -setup(name='foo', version='0.1', py_modules=['foo'], - url='xxx', author='xxx', author_email='xxx') - -""" - -try: - import zlib - ZLIB_SUPPORT = True -except ImportError: - ZLIB_SUPPORT = False - - -class BuildDumbTestCase(support.TempdirManager, - support.LoggingSilencer, - support.EnvironGuard, - unittest.TestCase): - - def setUp(self): - super(BuildDumbTestCase, self).setUp() - self.old_location = os.getcwd() - self.old_sys_argv = sys.argv, sys.argv[:] - - def tearDown(self): - os.chdir(self.old_location) - sys.argv = self.old_sys_argv[0] - sys.argv[:] = self.old_sys_argv[1] - super(BuildDumbTestCase, self).tearDown() - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_simple_built(self): - - # let's create a simple package - tmp_dir = self.mkdtemp() - pkg_dir = os.path.join(tmp_dir, 'foo') - os.mkdir(pkg_dir) - self.write_file((pkg_dir, 'setup.py'), SETUP_PY) - self.write_file((pkg_dir, 'foo.py'), '#') - self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py') - self.write_file((pkg_dir, 'README'), '') - - dist = Distribution({'name': 'foo', 'version': '0.1', - 'py_modules': ['foo'], - 'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx'}) - dist.script_name = 'setup.py' - os.chdir(pkg_dir) - - sys.argv = ['setup.py'] - cmd = bdist_dumb(dist) - - # so the output is the same no matter - # what is the platform - cmd.format = 'zip' - - cmd.ensure_finalized() - cmd.run() - - # see what we have - dist_created = os.listdir(os.path.join(pkg_dir, 'dist')) - base = "%s.%s.zip" % (dist.get_fullname(), cmd.plat_name) - - self.assertEqual(dist_created, [base]) - - # now let's check what we have in the zip file - fp = zipfile.ZipFile(os.path.join('dist', base)) - try: - contents = fp.namelist() - finally: - fp.close() - - contents = sorted(filter(None, map(os.path.basename, contents))) - wanted = ['foo-0.1-py%s.%s.egg-info' % sys.version_info[:2], 'foo.py'] - if not sys.dont_write_bytecode: - wanted.append('foo.%s.pyc' % sys.implementation.cache_tag) - self.assertEqual(contents, sorted(wanted)) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(BuildDumbTestCase) - 
-if __name__ == '__main__': - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_bdist_rpm.py b/Lib/distutils/tests/test_bdist_rpm.py deleted file mode 100644 index 7eefa7b9cad84f..00000000000000 --- a/Lib/distutils/tests/test_bdist_rpm.py +++ /dev/null @@ -1,141 +0,0 @@ -"""Tests for distutils.command.bdist_rpm.""" - -import unittest -import sys -import os -from test.support import run_unittest, requires_zlib - -from distutils.core import Distribution -from distutils.command.bdist_rpm import bdist_rpm -from distutils.tests import support -from distutils.spawn import find_executable - -SETUP_PY = """\ -from distutils.core import setup -import foo - -setup(name='foo', version='0.1', py_modules=['foo'], - url='xxx', author='xxx', author_email='xxx') - -""" - -class BuildRpmTestCase(support.TempdirManager, - support.EnvironGuard, - support.LoggingSilencer, - unittest.TestCase): - - def setUp(self): - try: - sys.executable.encode("UTF-8") - except UnicodeEncodeError: - raise unittest.SkipTest("sys.executable is not encodable to UTF-8") - - super(BuildRpmTestCase, self).setUp() - self.old_location = os.getcwd() - self.old_sys_argv = sys.argv, sys.argv[:] - - def tearDown(self): - os.chdir(self.old_location) - sys.argv = self.old_sys_argv[0] - sys.argv[:] = self.old_sys_argv[1] - super(BuildRpmTestCase, self).tearDown() - - # XXX I am unable yet to make this test work without - # spurious sdtout/stderr output under Mac OS X - @unittest.skipUnless(sys.platform.startswith('linux'), - 'spurious sdtout/stderr output under Mac OS X') - @requires_zlib() - @unittest.skipIf(find_executable('rpm') is None, - 'the rpm command is not found') - @unittest.skipIf(find_executable('rpmbuild') is None, - 'the rpmbuild command is not found') - # import foo fails with safe path - @unittest.skipIf(sys.flags.safe_path, - 'PYTHONSAFEPATH changes default sys.path') - def test_quiet(self): - # let's create a package - tmp_dir = self.mkdtemp() - os.environ['HOME'] = tmp_dir # to confine dir '.rpmdb' creation - pkg_dir = os.path.join(tmp_dir, 'foo') - os.mkdir(pkg_dir) - self.write_file((pkg_dir, 'setup.py'), SETUP_PY) - self.write_file((pkg_dir, 'foo.py'), '#') - self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py') - self.write_file((pkg_dir, 'README'), '') - - dist = Distribution({'name': 'foo', 'version': '0.1', - 'py_modules': ['foo'], - 'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx'}) - dist.script_name = 'setup.py' - os.chdir(pkg_dir) - - sys.argv = ['setup.py'] - cmd = bdist_rpm(dist) - cmd.fix_python = True - - # running in quiet mode - cmd.quiet = 1 - cmd.ensure_finalized() - cmd.run() - - dist_created = os.listdir(os.path.join(pkg_dir, 'dist')) - self.assertIn('foo-0.1-1.noarch.rpm', dist_created) - - # bug #2945: upload ignores bdist_rpm files - self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm'), dist.dist_files) - self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm'), dist.dist_files) - - # XXX I am unable yet to make this test work without - # spurious sdtout/stderr output under Mac OS X - @unittest.skipUnless(sys.platform.startswith('linux'), - 'spurious sdtout/stderr output under Mac OS X') - @requires_zlib() - # http://bugs.python.org/issue1533164 - @unittest.skipIf(find_executable('rpm') is None, - 'the rpm command is not found') - @unittest.skipIf(find_executable('rpmbuild') is None, - 'the rpmbuild command is not found') - # import foo fails with safe path - @unittest.skipIf(sys.flags.safe_path, - 'PYTHONSAFEPATH changes default sys.path') - def 
test_no_optimize_flag(self): - # let's create a package that breaks bdist_rpm - tmp_dir = self.mkdtemp() - os.environ['HOME'] = tmp_dir # to confine dir '.rpmdb' creation - pkg_dir = os.path.join(tmp_dir, 'foo') - os.mkdir(pkg_dir) - self.write_file((pkg_dir, 'setup.py'), SETUP_PY) - self.write_file((pkg_dir, 'foo.py'), '#') - self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py') - self.write_file((pkg_dir, 'README'), '') - - dist = Distribution({'name': 'foo', 'version': '0.1', - 'py_modules': ['foo'], - 'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx'}) - dist.script_name = 'setup.py' - os.chdir(pkg_dir) - - sys.argv = ['setup.py'] - cmd = bdist_rpm(dist) - cmd.fix_python = True - - cmd.quiet = 1 - cmd.ensure_finalized() - cmd.run() - - dist_created = os.listdir(os.path.join(pkg_dir, 'dist')) - self.assertIn('foo-0.1-1.noarch.rpm', dist_created) - - # bug #2945: upload ignores bdist_rpm files - self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm'), dist.dist_files) - self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm'), dist.dist_files) - - os.remove(os.path.join(pkg_dir, 'dist', 'foo-0.1-1.noarch.rpm')) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(BuildRpmTestCase) - -if __name__ == '__main__': - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_build.py b/Lib/distutils/tests/test_build.py deleted file mode 100644 index 71b5e164bae14a..00000000000000 --- a/Lib/distutils/tests/test_build.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Tests for distutils.command.build.""" -import unittest -import os -import sys -from test.support import run_unittest - -from distutils.command.build import build -from distutils.tests import support -from sysconfig import get_platform - -class BuildTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - - @unittest.skipUnless(sys.executable, "test requires sys.executable") - def test_finalize_options(self): - pkg_dir, dist = self.create_dist() - cmd = build(dist) - cmd.finalize_options() - - # if not specified, plat_name gets the current platform - self.assertEqual(cmd.plat_name, get_platform()) - - # build_purelib is build + lib - wanted = os.path.join(cmd.build_base, 'lib') - self.assertEqual(cmd.build_purelib, wanted) - - # build_platlib is 'build/lib.platform-x.x[-pydebug]' - # examples: - # build/lib.macosx-10.3-i386-2.7 - plat_spec = '.%s-%d.%d' % (cmd.plat_name, *sys.version_info[:2]) - if hasattr(sys, 'gettotalrefcount'): - self.assertTrue(cmd.build_platlib.endswith('-pydebug')) - plat_spec += '-pydebug' - wanted = os.path.join(cmd.build_base, 'lib' + plat_spec) - self.assertEqual(cmd.build_platlib, wanted) - - # by default, build_lib = build_purelib - self.assertEqual(cmd.build_lib, cmd.build_purelib) - - # build_temp is build/temp.<plat> - wanted = os.path.join(cmd.build_base, 'temp' + plat_spec) - self.assertEqual(cmd.build_temp, wanted) - - # build_scripts is build/scripts-x.x - wanted = os.path.join(cmd.build_base, - 'scripts-%d.%d' % sys.version_info[:2]) - self.assertEqual(cmd.build_scripts, wanted) - - # executable is os.path.normpath(sys.executable) - self.assertEqual(cmd.executable, os.path.normpath(sys.executable)) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(BuildTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_build_clib.py b/Lib/distutils/tests/test_build_clib.py deleted file mode 100644 index 95f928288e0048..00000000000000 --- 
a/Lib/distutils/tests/test_build_clib.py +++ /dev/null @@ -1,147 +0,0 @@ -"""Tests for distutils.command.build_clib.""" -import unittest -import os -import sys -import sysconfig - -from test.support import ( - run_unittest, missing_compiler_executable, requires_subprocess -) - -from distutils.command.build_clib import build_clib -from distutils.errors import DistutilsSetupError -from distutils.tests import support - -class BuildCLibTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - - def setUp(self): - super().setUp() - self._backup_CONFIG_VARS = dict(sysconfig._CONFIG_VARS) - - def tearDown(self): - super().tearDown() - sysconfig._CONFIG_VARS.clear() - sysconfig._CONFIG_VARS.update(self._backup_CONFIG_VARS) - - def test_check_library_dist(self): - pkg_dir, dist = self.create_dist() - cmd = build_clib(dist) - - # 'libraries' option must be a list - self.assertRaises(DistutilsSetupError, cmd.check_library_list, 'foo') - - # each element of 'libraries' must a 2-tuple - self.assertRaises(DistutilsSetupError, cmd.check_library_list, - ['foo1', 'foo2']) - - # first element of each tuple in 'libraries' - # must be a string (the library name) - self.assertRaises(DistutilsSetupError, cmd.check_library_list, - [(1, 'foo1'), ('name', 'foo2')]) - - # library name may not contain directory separators - self.assertRaises(DistutilsSetupError, cmd.check_library_list, - [('name', 'foo1'), - ('another/name', 'foo2')]) - - # second element of each tuple must be a dictionary (build info) - self.assertRaises(DistutilsSetupError, cmd.check_library_list, - [('name', {}), - ('another', 'foo2')]) - - # those work - libs = [('name', {}), ('name', {'ok': 'good'})] - cmd.check_library_list(libs) - - def test_get_source_files(self): - pkg_dir, dist = self.create_dist() - cmd = build_clib(dist) - - # "in 'libraries' option 'sources' must be present and must be - # a list of source filenames - cmd.libraries = [('name', {})] - self.assertRaises(DistutilsSetupError, cmd.get_source_files) - - cmd.libraries = [('name', {'sources': 1})] - self.assertRaises(DistutilsSetupError, cmd.get_source_files) - - cmd.libraries = [('name', {'sources': ['a', 'b']})] - self.assertEqual(cmd.get_source_files(), ['a', 'b']) - - cmd.libraries = [('name', {'sources': ('a', 'b')})] - self.assertEqual(cmd.get_source_files(), ['a', 'b']) - - cmd.libraries = [('name', {'sources': ('a', 'b')}), - ('name2', {'sources': ['c', 'd']})] - self.assertEqual(cmd.get_source_files(), ['a', 'b', 'c', 'd']) - - def test_build_libraries(self): - - pkg_dir, dist = self.create_dist() - cmd = build_clib(dist) - class FakeCompiler: - def compile(*args, **kw): - pass - create_static_lib = compile - - cmd.compiler = FakeCompiler() - - # build_libraries is also doing a bit of typo checking - lib = [('name', {'sources': 'notvalid'})] - self.assertRaises(DistutilsSetupError, cmd.build_libraries, lib) - - lib = [('name', {'sources': list()})] - cmd.build_libraries(lib) - - lib = [('name', {'sources': tuple()})] - cmd.build_libraries(lib) - - def test_finalize_options(self): - pkg_dir, dist = self.create_dist() - cmd = build_clib(dist) - - cmd.include_dirs = 'one-dir' - cmd.finalize_options() - self.assertEqual(cmd.include_dirs, ['one-dir']) - - cmd.include_dirs = None - cmd.finalize_options() - self.assertEqual(cmd.include_dirs, []) - - cmd.distribution.libraries = 'WONTWORK' - self.assertRaises(DistutilsSetupError, cmd.finalize_options) - - @unittest.skipIf(sys.platform == 'win32', "can't test on Windows") - @requires_subprocess() - def 
test_run(self): - pkg_dir, dist = self.create_dist() - cmd = build_clib(dist) - - foo_c = os.path.join(pkg_dir, 'foo.c') - self.write_file(foo_c, 'int main(void) { return 1;}\n') - cmd.libraries = [('foo', {'sources': [foo_c]})] - - build_temp = os.path.join(pkg_dir, 'build') - os.mkdir(build_temp) - cmd.build_temp = build_temp - cmd.build_clib = build_temp - - # Before we run the command, we want to make sure - # all commands are present on the system. - ccmd = missing_compiler_executable() - if ccmd is not None: - self.skipTest('The %r command is not found' % ccmd) - - # this should work - cmd.run() - - # let's check the result - self.assertIn('libfoo.a', os.listdir(build_temp)) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(BuildCLibTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_build_ext.py b/Lib/distutils/tests/test_build_ext.py deleted file mode 100644 index 4ebeafecef03c3..00000000000000 --- a/Lib/distutils/tests/test_build_ext.py +++ /dev/null @@ -1,555 +0,0 @@ -import sys -import os -from io import StringIO -import textwrap - -from distutils.core import Distribution -from distutils.command.build_ext import build_ext -from distutils import sysconfig -from distutils.tests.support import (TempdirManager, LoggingSilencer, - copy_xxmodule_c, fixup_build_ext) -from distutils.extension import Extension -from distutils.errors import ( - CompileError, DistutilsPlatformError, DistutilsSetupError, - UnknownFileError) - -import unittest -from test import support -from test.support import os_helper -from test.support.script_helper import assert_python_ok -from test.support import threading_helper - -# http://bugs.python.org/issue4373 -# Don't load the xx module more than once. -ALREADY_TESTED = False - - -class BuildExtTestCase(TempdirManager, - LoggingSilencer, - unittest.TestCase): - def setUp(self): - # Create a simple test environment - super(BuildExtTestCase, self).setUp() - self.tmp_dir = self.mkdtemp() - import site - self.old_user_base = site.USER_BASE - site.USER_BASE = self.mkdtemp() - from distutils.command import build_ext - build_ext.USER_BASE = site.USER_BASE - self.old_config_vars = dict(sysconfig._config_vars) - - # bpo-30132: On Windows, a .pdb file may be created in the current - # working directory. Create a temporary working directory to cleanup - # everything at the end of the test. 
- self.enterContext(os_helper.change_cwd(self.tmp_dir)) - - def tearDown(self): - import site - site.USER_BASE = self.old_user_base - from distutils.command import build_ext - build_ext.USER_BASE = self.old_user_base - sysconfig._config_vars.clear() - sysconfig._config_vars.update(self.old_config_vars) - super(BuildExtTestCase, self).tearDown() - - def build_ext(self, *args, **kwargs): - return build_ext(*args, **kwargs) - - @support.requires_subprocess() - def test_build_ext(self): - cmd = support.missing_compiler_executable() - if cmd is not None: - self.skipTest('The %r command is not found' % cmd) - global ALREADY_TESTED - copy_xxmodule_c(self.tmp_dir) - xx_c = os.path.join(self.tmp_dir, 'xxmodule.c') - xx_ext = Extension('xx', [xx_c]) - dist = Distribution({'name': 'xx', 'ext_modules': [xx_ext]}) - dist.package_dir = self.tmp_dir - cmd = self.build_ext(dist) - fixup_build_ext(cmd) - cmd.build_lib = self.tmp_dir - cmd.build_temp = self.tmp_dir - - old_stdout = sys.stdout - if not support.verbose: - # silence compiler output - sys.stdout = StringIO() - try: - cmd.ensure_finalized() - cmd.run() - finally: - sys.stdout = old_stdout - - if ALREADY_TESTED: - self.skipTest('Already tested in %s' % ALREADY_TESTED) - else: - ALREADY_TESTED = type(self).__name__ - - code = textwrap.dedent(f""" - tmp_dir = {self.tmp_dir!r} - - import sys - import unittest - from test import support - - sys.path.insert(0, tmp_dir) - import xx - - class Tests(unittest.TestCase): - def test_xx(self): - for attr in ('error', 'foo', 'new', 'roj'): - self.assertTrue(hasattr(xx, attr)) - - self.assertEqual(xx.foo(2, 5), 7) - self.assertEqual(xx.foo(13,15), 28) - self.assertEqual(xx.new().demo(), None) - if support.HAVE_DOCSTRINGS: - doc = 'This is a template module just for instruction.' - self.assertEqual(xx.__doc__, doc) - self.assertIsInstance(xx.Null(), xx.Null) - self.assertIsInstance(xx.Str(), xx.Str) - - - unittest.main() - """) - assert_python_ok('-c', code) - - def test_solaris_enable_shared(self): - dist = Distribution({'name': 'xx'}) - cmd = self.build_ext(dist) - old = sys.platform - - sys.platform = 'sunos' # fooling finalize_options - from distutils.sysconfig import _config_vars - old_var = _config_vars.get('Py_ENABLE_SHARED') - _config_vars['Py_ENABLE_SHARED'] = 1 - try: - cmd.ensure_finalized() - finally: - sys.platform = old - if old_var is None: - del _config_vars['Py_ENABLE_SHARED'] - else: - _config_vars['Py_ENABLE_SHARED'] = old_var - - # make sure we get some library dirs under solaris - self.assertGreater(len(cmd.library_dirs), 0) - - def test_user_site(self): - import site - dist = Distribution({'name': 'xx'}) - cmd = self.build_ext(dist) - - # making sure the user option is there - options = [name for name, short, lable in - cmd.user_options] - self.assertIn('user', options) - - # setting a value - cmd.user = 1 - - # setting user based lib and include - lib = os.path.join(site.USER_BASE, 'lib') - incl = os.path.join(site.USER_BASE, 'include') - os.mkdir(lib) - os.mkdir(incl) - - # let's run finalize - cmd.ensure_finalized() - - # see if include_dirs and library_dirs - # were set - self.assertIn(lib, cmd.library_dirs) - self.assertIn(lib, cmd.rpath) - self.assertIn(incl, cmd.include_dirs) - - @threading_helper.requires_working_threading() - def test_optional_extension(self): - - # this extension will fail, but let's ignore this failure - # with the optional argument. 
- modules = [Extension('foo', ['xxx'], optional=False)] - dist = Distribution({'name': 'xx', 'ext_modules': modules}) - cmd = self.build_ext(dist) - cmd.ensure_finalized() - self.assertRaises((UnknownFileError, CompileError), - cmd.run) # should raise an error - - modules = [Extension('foo', ['xxx'], optional=True)] - dist = Distribution({'name': 'xx', 'ext_modules': modules}) - cmd = self.build_ext(dist) - cmd.ensure_finalized() - cmd.run() # should pass - - def test_finalize_options(self): - # Make sure Python's include directories (for Python.h, pyconfig.h, - # etc.) are in the include search path. - modules = [Extension('foo', ['xxx'], optional=False)] - dist = Distribution({'name': 'xx', 'ext_modules': modules}) - cmd = self.build_ext(dist) - cmd.finalize_options() - - py_include = sysconfig.get_python_inc() - for p in py_include.split(os.path.pathsep): - self.assertIn(p, cmd.include_dirs) - - plat_py_include = sysconfig.get_python_inc(plat_specific=1) - for p in plat_py_include.split(os.path.pathsep): - self.assertIn(p, cmd.include_dirs) - - # make sure cmd.libraries is turned into a list - # if it's a string - cmd = self.build_ext(dist) - cmd.libraries = 'my_lib, other_lib lastlib' - cmd.finalize_options() - self.assertEqual(cmd.libraries, ['my_lib', 'other_lib', 'lastlib']) - - # make sure cmd.library_dirs is turned into a list - # if it's a string - cmd = self.build_ext(dist) - cmd.library_dirs = 'my_lib_dir%sother_lib_dir' % os.pathsep - cmd.finalize_options() - self.assertIn('my_lib_dir', cmd.library_dirs) - self.assertIn('other_lib_dir', cmd.library_dirs) - - # make sure rpath is turned into a list - # if it's a string - cmd = self.build_ext(dist) - cmd.rpath = 'one%stwo' % os.pathsep - cmd.finalize_options() - self.assertEqual(cmd.rpath, ['one', 'two']) - - # make sure cmd.link_objects is turned into a list - # if it's a string - cmd = build_ext(dist) - cmd.link_objects = 'one two,three' - cmd.finalize_options() - self.assertEqual(cmd.link_objects, ['one', 'two', 'three']) - - # XXX more tests to perform for win32 - - # make sure define is turned into 2-tuples - # strings if they are ','-separated strings - cmd = self.build_ext(dist) - cmd.define = 'one,two' - cmd.finalize_options() - self.assertEqual(cmd.define, [('one', '1'), ('two', '1')]) - - # make sure undef is turned into a list of - # strings if they are ','-separated strings - cmd = self.build_ext(dist) - cmd.undef = 'one,two' - cmd.finalize_options() - self.assertEqual(cmd.undef, ['one', 'two']) - - # make sure swig_opts is turned into a list - cmd = self.build_ext(dist) - cmd.swig_opts = None - cmd.finalize_options() - self.assertEqual(cmd.swig_opts, []) - - cmd = self.build_ext(dist) - cmd.swig_opts = '1 2' - cmd.finalize_options() - self.assertEqual(cmd.swig_opts, ['1', '2']) - - def test_check_extensions_list(self): - dist = Distribution() - cmd = self.build_ext(dist) - cmd.finalize_options() - - #'extensions' option must be a list of Extension instances - self.assertRaises(DistutilsSetupError, - cmd.check_extensions_list, 'foo') - - # each element of 'ext_modules' option must be an - # Extension instance or 2-tuple - exts = [('bar', 'foo', 'bar'), 'foo'] - self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts) - - # first element of each tuple in 'ext_modules' - # must be the extension name (a string) and match - # a python dotted-separated name - exts = [('foo-bar', '')] - self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts) - - # second element of each tuple in 'ext_modules' 
- # must be a dictionary (build info) - exts = [('foo.bar', '')] - self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts) - - # ok this one should pass - exts = [('foo.bar', {'sources': [''], 'libraries': 'foo', - 'some': 'bar'})] - cmd.check_extensions_list(exts) - ext = exts[0] - self.assertIsInstance(ext, Extension) - - # check_extensions_list adds in ext the values passed - # when they are in ('include_dirs', 'library_dirs', 'libraries' - # 'extra_objects', 'extra_compile_args', 'extra_link_args') - self.assertEqual(ext.libraries, 'foo') - self.assertFalse(hasattr(ext, 'some')) - - # 'macros' element of build info dict must be 1- or 2-tuple - exts = [('foo.bar', {'sources': [''], 'libraries': 'foo', - 'some': 'bar', 'macros': [('1', '2', '3'), 'foo']})] - self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts) - - exts[0][1]['macros'] = [('1', '2'), ('3',)] - cmd.check_extensions_list(exts) - self.assertEqual(exts[0].undef_macros, ['3']) - self.assertEqual(exts[0].define_macros, [('1', '2')]) - - def test_get_source_files(self): - modules = [Extension('foo', ['xxx'], optional=False)] - dist = Distribution({'name': 'xx', 'ext_modules': modules}) - cmd = self.build_ext(dist) - cmd.ensure_finalized() - self.assertEqual(cmd.get_source_files(), ['xxx']) - - def test_unicode_module_names(self): - modules = [ - Extension('foo', ['aaa'], optional=False), - Extension('föö', ['uuu'], optional=False), - ] - dist = Distribution({'name': 'xx', 'ext_modules': modules}) - cmd = self.build_ext(dist) - cmd.ensure_finalized() - self.assertRegex(cmd.get_ext_filename(modules[0].name), r'foo(_d)?\..*') - self.assertRegex(cmd.get_ext_filename(modules[1].name), r'föö(_d)?\..*') - self.assertEqual(cmd.get_export_symbols(modules[0]), ['PyInit_foo']) - self.assertEqual(cmd.get_export_symbols(modules[1]), ['PyInitU_f_gkaa']) - - def test_compiler_option(self): - # cmd.compiler is an option and - # should not be overridden by a compiler instance - # when the command is run - dist = Distribution() - cmd = self.build_ext(dist) - cmd.compiler = 'unix' - cmd.ensure_finalized() - cmd.run() - self.assertEqual(cmd.compiler, 'unix') - - @support.requires_subprocess() - def test_get_outputs(self): - cmd = support.missing_compiler_executable() - if cmd is not None: - self.skipTest('The %r command is not found' % cmd) - tmp_dir = self.mkdtemp() - c_file = os.path.join(tmp_dir, 'foo.c') - self.write_file(c_file, 'void PyInit_foo(void) {}\n') - ext = Extension('foo', [c_file], optional=False) - dist = Distribution({'name': 'xx', - 'ext_modules': [ext]}) - cmd = self.build_ext(dist) - fixup_build_ext(cmd) - cmd.ensure_finalized() - self.assertEqual(len(cmd.get_outputs()), 1) - - cmd.build_lib = os.path.join(self.tmp_dir, 'build') - cmd.build_temp = os.path.join(self.tmp_dir, 'tempt') - - # issue #5977 : distutils build_ext.get_outputs - # returns wrong result with --inplace - other_tmp_dir = os.path.realpath(self.mkdtemp()) - old_wd = os.getcwd() - os.chdir(other_tmp_dir) - try: - cmd.inplace = 1 - cmd.run() - so_file = cmd.get_outputs()[0] - finally: - os.chdir(old_wd) - self.assertTrue(os.path.exists(so_file)) - ext_suffix = sysconfig.get_config_var('EXT_SUFFIX') - self.assertTrue(so_file.endswith(ext_suffix)) - so_dir = os.path.dirname(so_file) - self.assertEqual(so_dir, other_tmp_dir) - - cmd.inplace = 0 - cmd.compiler = None - cmd.run() - so_file = cmd.get_outputs()[0] - self.assertTrue(os.path.exists(so_file)) - self.assertTrue(so_file.endswith(ext_suffix)) - so_dir = 
os.path.dirname(so_file) - self.assertEqual(so_dir, cmd.build_lib) - - # inplace = 0, cmd.package = 'bar' - build_py = cmd.get_finalized_command('build_py') - build_py.package_dir = {'': 'bar'} - path = cmd.get_ext_fullpath('foo') - # checking that the last directory is the build_dir - path = os.path.split(path)[0] - self.assertEqual(path, cmd.build_lib) - - # inplace = 1, cmd.package = 'bar' - cmd.inplace = 1 - other_tmp_dir = os.path.realpath(self.mkdtemp()) - old_wd = os.getcwd() - os.chdir(other_tmp_dir) - try: - path = cmd.get_ext_fullpath('foo') - finally: - os.chdir(old_wd) - # checking that the last directory is bar - path = os.path.split(path)[0] - lastdir = os.path.split(path)[-1] - self.assertEqual(lastdir, 'bar') - - def test_ext_fullpath(self): - ext = sysconfig.get_config_var('EXT_SUFFIX') - # building lxml.etree inplace - #etree_c = os.path.join(self.tmp_dir, 'lxml.etree.c') - #etree_ext = Extension('lxml.etree', [etree_c]) - #dist = Distribution({'name': 'lxml', 'ext_modules': [etree_ext]}) - dist = Distribution() - cmd = self.build_ext(dist) - cmd.inplace = 1 - cmd.distribution.package_dir = {'': 'src'} - cmd.distribution.packages = ['lxml', 'lxml.html'] - curdir = os.getcwd() - wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext) - path = cmd.get_ext_fullpath('lxml.etree') - self.assertEqual(wanted, path) - - # building lxml.etree not inplace - cmd.inplace = 0 - cmd.build_lib = os.path.join(curdir, 'tmpdir') - wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + ext) - path = cmd.get_ext_fullpath('lxml.etree') - self.assertEqual(wanted, path) - - # building twisted.runner.portmap not inplace - build_py = cmd.get_finalized_command('build_py') - build_py.package_dir = {} - cmd.distribution.packages = ['twisted', 'twisted.runner.portmap'] - path = cmd.get_ext_fullpath('twisted.runner.portmap') - wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner', - 'portmap' + ext) - self.assertEqual(wanted, path) - - # building twisted.runner.portmap inplace - cmd.inplace = 1 - path = cmd.get_ext_fullpath('twisted.runner.portmap') - wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext) - self.assertEqual(wanted, path) - - - @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX') - def test_deployment_target_default(self): - # Issue 9516: Test that, in the absence of the environment variable, - # an extension module is compiled with the same deployment target as - # the interpreter. - self._try_compile_deployment_target('==', None) - - @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX') - def test_deployment_target_too_low(self): - # Issue 9516: Test that an extension module is not allowed to be - # compiled with a deployment target less than that of the interpreter. - self.assertRaises(DistutilsPlatformError, - self._try_compile_deployment_target, '>', '10.1') - - @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX') - def test_deployment_target_higher_ok(self): - # Issue 9516: Test that an extension module can be compiled with a - # deployment target higher than that of the interpreter: the ext - # module may depend on some newer OS feature. - deptarget = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') - if deptarget: - # increment the minor version number (i.e. 
10.6 -> 10.7) - deptarget = [int(x) for x in deptarget.split('.')] - deptarget[-1] += 1 - deptarget = '.'.join(str(i) for i in deptarget) - self._try_compile_deployment_target('<', deptarget) - - def _try_compile_deployment_target(self, operator, target): - orig_environ = os.environ - os.environ = orig_environ.copy() - self.addCleanup(setattr, os, 'environ', orig_environ) - - if target is None: - if os.environ.get('MACOSX_DEPLOYMENT_TARGET'): - del os.environ['MACOSX_DEPLOYMENT_TARGET'] - else: - os.environ['MACOSX_DEPLOYMENT_TARGET'] = target - - deptarget_c = os.path.join(self.tmp_dir, 'deptargetmodule.c') - - with open(deptarget_c, 'w') as fp: - fp.write(textwrap.dedent('''\ - #include <AvailabilityMacros.h> - - int dummy; - - #if TARGET %s MAC_OS_X_VERSION_MIN_REQUIRED - #else - #error "Unexpected target" - #endif - - ''' % operator)) - - # get the deployment target that the interpreter was built with - target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') - target = tuple(map(int, target.split('.')[0:2])) - # format the target value as defined in the Apple - # Availability Macros. We can't use the macro names since - # at least one value we test with will not exist yet. - if target[:2] < (10, 10): - # for 10.1 through 10.9.x -> "10n0" - target = '%02d%01d0' % target - else: - # for 10.10 and beyond -> "10nn00" - if len(target) >= 2: - target = '%02d%02d00' % target - else: - # 11 and later can have no minor version (11 instead of 11.0) - target = '%02d0000' % target - deptarget_ext = Extension( - 'deptarget', - [deptarget_c], - extra_compile_args=['-DTARGET=%s'%(target,)], - ) - dist = Distribution({ - 'name': 'deptarget', - 'ext_modules': [deptarget_ext] - }) - dist.package_dir = self.tmp_dir - cmd = self.build_ext(dist) - cmd.build_lib = self.tmp_dir - cmd.build_temp = self.tmp_dir - - try: - old_stdout = sys.stdout - if not support.verbose: - # silence compiler output - sys.stdout = StringIO() - try: - cmd.ensure_finalized() - cmd.run() - finally: - sys.stdout = old_stdout - - except CompileError: - self.fail("Wrong deployment target during compilation") - - -class ParallelBuildExtTestCase(BuildExtTestCase): - - def build_ext(self, *args, **kwargs): - build_ext = super().build_ext(*args, **kwargs) - build_ext.parallel = True - return build_ext - - -def test_suite(): - suite = unittest.TestSuite() - suite.addTest(unittest.TestLoader().loadTestsFromTestCase(BuildExtTestCase)) - suite.addTest(unittest.TestLoader().loadTestsFromTestCase(ParallelBuildExtTestCase)) - return suite - -if __name__ == '__main__': - support.run_unittest(__name__) diff --git a/Lib/distutils/tests/test_build_py.py b/Lib/distutils/tests/test_build_py.py deleted file mode 100644 index 44a06cc963aa3c..00000000000000 --- a/Lib/distutils/tests/test_build_py.py +++ /dev/null @@ -1,181 +0,0 @@ -"""Tests for distutils.command.build_py.""" - -import os -import sys -import unittest - -from distutils.command.build_py import build_py -from distutils.core import Distribution -from distutils.errors import DistutilsFileError - -from distutils.tests import support -from test.support import run_unittest, requires_subprocess - - -class BuildPyTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - - def test_package_data(self): - sources = self.mkdtemp() - f = open(os.path.join(sources, "__init__.py"), "w") - try: - f.write("# Pretend this is a package.") - finally: - f.close() - f = open(os.path.join(sources, "README.txt"), "w") - try: - f.write("Info about this package") - finally: - 
f.close() - - destination = self.mkdtemp() - - dist = Distribution({"packages": ["pkg"], - "package_dir": {"pkg": sources}}) - # script_name need not exist, it just need to be initialized - dist.script_name = os.path.join(sources, "setup.py") - dist.command_obj["build"] = support.DummyCommand( - force=0, - build_lib=destination) - dist.packages = ["pkg"] - dist.package_data = {"pkg": ["README.txt"]} - dist.package_dir = {"pkg": sources} - - cmd = build_py(dist) - cmd.compile = 1 - cmd.ensure_finalized() - self.assertEqual(cmd.package_data, dist.package_data) - - cmd.run() - - # This makes sure the list of outputs includes byte-compiled - # files for Python modules but not for package data files - # (there shouldn't *be* byte-code files for those!). - self.assertEqual(len(cmd.get_outputs()), 3) - pkgdest = os.path.join(destination, "pkg") - files = os.listdir(pkgdest) - pycache_dir = os.path.join(pkgdest, "__pycache__") - self.assertIn("__init__.py", files) - self.assertIn("README.txt", files) - if sys.dont_write_bytecode: - self.assertFalse(os.path.exists(pycache_dir)) - else: - pyc_files = os.listdir(pycache_dir) - self.assertIn("__init__.%s.pyc" % sys.implementation.cache_tag, - pyc_files) - - def test_empty_package_dir(self): - # See bugs #1668596/#1720897 - sources = self.mkdtemp() - open(os.path.join(sources, "__init__.py"), "w").close() - - testdir = os.path.join(sources, "doc") - os.mkdir(testdir) - open(os.path.join(testdir, "testfile"), "w").close() - - os.chdir(sources) - dist = Distribution({"packages": ["pkg"], - "package_dir": {"pkg": ""}, - "package_data": {"pkg": ["doc/*"]}}) - # script_name need not exist, it just need to be initialized - dist.script_name = os.path.join(sources, "setup.py") - dist.script_args = ["build"] - dist.parse_command_line() - - try: - dist.run_commands() - except DistutilsFileError: - self.fail("failed package_data test when package_dir is ''") - - @unittest.skipIf(sys.dont_write_bytecode, 'byte-compile disabled') - @requires_subprocess() - def test_byte_compile(self): - project_dir, dist = self.create_dist(py_modules=['boiledeggs']) - os.chdir(project_dir) - self.write_file('boiledeggs.py', 'import antigravity') - cmd = build_py(dist) - cmd.compile = 1 - cmd.build_lib = 'here' - cmd.finalize_options() - cmd.run() - - found = os.listdir(cmd.build_lib) - self.assertEqual(sorted(found), ['__pycache__', 'boiledeggs.py']) - found = os.listdir(os.path.join(cmd.build_lib, '__pycache__')) - self.assertEqual(found, - ['boiledeggs.%s.pyc' % sys.implementation.cache_tag]) - - @unittest.skipIf(sys.dont_write_bytecode, 'byte-compile disabled') - @requires_subprocess() - def test_byte_compile_optimized(self): - project_dir, dist = self.create_dist(py_modules=['boiledeggs']) - os.chdir(project_dir) - self.write_file('boiledeggs.py', 'import antigravity') - cmd = build_py(dist) - cmd.compile = 0 - cmd.optimize = 1 - cmd.build_lib = 'here' - cmd.finalize_options() - cmd.run() - - found = os.listdir(cmd.build_lib) - self.assertEqual(sorted(found), ['__pycache__', 'boiledeggs.py']) - found = os.listdir(os.path.join(cmd.build_lib, '__pycache__')) - expect = 'boiledeggs.{}.opt-1.pyc'.format(sys.implementation.cache_tag) - self.assertEqual(sorted(found), [expect]) - - def test_dir_in_package_data(self): - """ - A directory in package_data should not be added to the filelist. 
- """ - # See bug 19286 - sources = self.mkdtemp() - pkg_dir = os.path.join(sources, "pkg") - - os.mkdir(pkg_dir) - open(os.path.join(pkg_dir, "__init__.py"), "w").close() - - docdir = os.path.join(pkg_dir, "doc") - os.mkdir(docdir) - open(os.path.join(docdir, "testfile"), "w").close() - - # create the directory that could be incorrectly detected as a file - os.mkdir(os.path.join(docdir, 'otherdir')) - - os.chdir(sources) - dist = Distribution({"packages": ["pkg"], - "package_data": {"pkg": ["doc/*"]}}) - # script_name need not exist, it just need to be initialized - dist.script_name = os.path.join(sources, "setup.py") - dist.script_args = ["build"] - dist.parse_command_line() - - try: - dist.run_commands() - except DistutilsFileError: - self.fail("failed package_data when data dir includes a dir") - - def test_dont_write_bytecode(self): - # makes sure byte_compile is not used - dist = self.create_dist()[1] - cmd = build_py(dist) - cmd.compile = 1 - cmd.optimize = 1 - - old_dont_write_bytecode = sys.dont_write_bytecode - sys.dont_write_bytecode = True - try: - cmd.byte_compile([]) - finally: - sys.dont_write_bytecode = old_dont_write_bytecode - - self.assertIn('byte-compiling is disabled', - self.logs[0][1] % self.logs[0][2]) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(BuildPyTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_build_scripts.py b/Lib/distutils/tests/test_build_scripts.py deleted file mode 100644 index f299e51ef79fac..00000000000000 --- a/Lib/distutils/tests/test_build_scripts.py +++ /dev/null @@ -1,112 +0,0 @@ -"""Tests for distutils.command.build_scripts.""" - -import os -import unittest - -from distutils.command.build_scripts import build_scripts -from distutils.core import Distribution -from distutils import sysconfig - -from distutils.tests import support -from test.support import run_unittest - - -class BuildScriptsTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - - def test_default_settings(self): - cmd = self.get_build_scripts_cmd("/foo/bar", []) - self.assertFalse(cmd.force) - self.assertIsNone(cmd.build_dir) - - cmd.finalize_options() - - self.assertTrue(cmd.force) - self.assertEqual(cmd.build_dir, "/foo/bar") - - def test_build(self): - source = self.mkdtemp() - target = self.mkdtemp() - expected = self.write_sample_scripts(source) - - cmd = self.get_build_scripts_cmd(target, - [os.path.join(source, fn) - for fn in expected]) - cmd.finalize_options() - cmd.run() - - built = os.listdir(target) - for name in expected: - self.assertIn(name, built) - - def get_build_scripts_cmd(self, target, scripts): - import sys - dist = Distribution() - dist.scripts = scripts - dist.command_obj["build"] = support.DummyCommand( - build_scripts=target, - force=1, - executable=sys.executable - ) - return build_scripts(dist) - - def write_sample_scripts(self, dir): - expected = [] - expected.append("script1.py") - self.write_script(dir, "script1.py", - ("#! 
/usr/bin/env python2.3\n" - "# bogus script w/ Python sh-bang\n" - "pass\n")) - expected.append("script2.py") - self.write_script(dir, "script2.py", - ("#!/usr/bin/python\n" - "# bogus script w/ Python sh-bang\n" - "pass\n")) - expected.append("shell.sh") - self.write_script(dir, "shell.sh", - ("#!/bin/sh\n" - "# bogus shell script w/ sh-bang\n" - "exit 0\n")) - return expected - - def write_script(self, dir, name, text): - f = open(os.path.join(dir, name), "w") - try: - f.write(text) - finally: - f.close() - - def test_version_int(self): - source = self.mkdtemp() - target = self.mkdtemp() - expected = self.write_sample_scripts(source) - - - cmd = self.get_build_scripts_cmd(target, - [os.path.join(source, fn) - for fn in expected]) - cmd.finalize_options() - - # http://bugs.python.org/issue4524 - # - # On linux-g++-32 with command line `./configure --enable-ipv6 - # --with-suffix=3`, python is compiled okay but the build scripts - # failed when writing the name of the executable - old = sysconfig.get_config_vars().get('VERSION') - sysconfig._config_vars['VERSION'] = 4 - try: - cmd.run() - finally: - if old is not None: - sysconfig._config_vars['VERSION'] = old - - built = os.listdir(target) - for name in expected: - self.assertIn(name, built) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(BuildScriptsTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_check.py b/Lib/distutils/tests/test_check.py deleted file mode 100644 index 91bcdceb43bc69..00000000000000 --- a/Lib/distutils/tests/test_check.py +++ /dev/null @@ -1,163 +0,0 @@ -"""Tests for distutils.command.check.""" -import os -import textwrap -import unittest -from test.support import run_unittest - -from distutils.command.check import check, HAS_DOCUTILS -from distutils.tests import support -from distutils.errors import DistutilsSetupError - -try: - import pygments -except ImportError: - pygments = None - - -HERE = os.path.dirname(__file__) - - -class CheckTestCase(support.LoggingSilencer, - support.TempdirManager, - unittest.TestCase): - - def _run(self, metadata=None, cwd=None, **options): - if metadata is None: - metadata = {} - if cwd is not None: - old_dir = os.getcwd() - os.chdir(cwd) - pkg_info, dist = self.create_dist(**metadata) - cmd = check(dist) - cmd.initialize_options() - for name, value in options.items(): - setattr(cmd, name, value) - cmd.ensure_finalized() - cmd.run() - if cwd is not None: - os.chdir(old_dir) - return cmd - - def test_check_metadata(self): - # let's run the command with no metadata at all - # by default, check is checking the metadata - # should have some warnings - cmd = self._run() - self.assertEqual(cmd._warnings, 2) - - # now let's add the required fields - # and run it again, to make sure we don't get - # any warning anymore - metadata = {'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx', - 'name': 'xxx', 'version': 'xxx'} - cmd = self._run(metadata) - self.assertEqual(cmd._warnings, 0) - - # now with the strict mode, we should - # get an error if there are missing metadata - self.assertRaises(DistutilsSetupError, self._run, {}, **{'strict': 1}) - - # and of course, no error when all metadata are present - cmd = self._run(metadata, strict=1) - self.assertEqual(cmd._warnings, 0) - - # now a test with non-ASCII characters - metadata = {'url': 'xxx', 'author': '\u00c9ric', - 'author_email': 'xxx', 'name': 'xxx', - 'version': 'xxx', - 'description': 'Something about esszet \u00df', - 'long_description': 'More things 
about esszet \u00df'} - cmd = self._run(metadata) - self.assertEqual(cmd._warnings, 0) - - @unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils") - def test_check_document(self): - pkg_info, dist = self.create_dist() - cmd = check(dist) - - # let's see if it detects broken rest - broken_rest = 'title\n===\n\ntest' - msgs = cmd._check_rst_data(broken_rest) - self.assertEqual(len(msgs), 1) - - # and non-broken rest - rest = 'title\n=====\n\ntest' - msgs = cmd._check_rst_data(rest) - self.assertEqual(len(msgs), 0) - - @unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils") - def test_check_restructuredtext(self): - # let's see if it detects broken rest in long_description - broken_rest = 'title\n===\n\ntest' - pkg_info, dist = self.create_dist(long_description=broken_rest) - cmd = check(dist) - cmd.check_restructuredtext() - self.assertEqual(cmd._warnings, 1) - - # let's see if we have an error with strict=1 - metadata = {'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx', - 'name': 'xxx', 'version': 'xxx', - 'long_description': broken_rest} - self.assertRaises(DistutilsSetupError, self._run, metadata, - **{'strict': 1, 'restructuredtext': 1}) - - # and non-broken rest, including a non-ASCII character to test #12114 - metadata['long_description'] = 'title\n=====\n\ntest \u00df' - cmd = self._run(metadata, strict=1, restructuredtext=1) - self.assertEqual(cmd._warnings, 0) - - # check that includes work to test #31292 - metadata['long_description'] = 'title\n=====\n\n.. include:: includetest.rst' - cmd = self._run(metadata, cwd=HERE, strict=1, restructuredtext=1) - self.assertEqual(cmd._warnings, 0) - - @unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils") - def test_check_restructuredtext_with_syntax_highlight(self): - # Don't fail if there is a `code` or `code-block` directive - - example_rst_docs = [] - example_rst_docs.append(textwrap.dedent("""\ - Here's some code: - - .. code:: python - - def foo(): - pass - """)) - example_rst_docs.append(textwrap.dedent("""\ - Here's some code: - - .. code-block:: python - - def foo(): - pass - """)) - - for rest_with_code in example_rst_docs: - pkg_info, dist = self.create_dist(long_description=rest_with_code) - cmd = check(dist) - cmd.check_restructuredtext() - msgs = cmd._check_rst_data(rest_with_code) - if pygments is not None: - self.assertEqual(len(msgs), 0) - else: - self.assertEqual(len(msgs), 1) - self.assertEqual( - str(msgs[0][1]), - 'Cannot analyze code. Pygments package not found.' 
- ) - - def test_check_all(self): - - metadata = {'url': 'xxx', 'author': 'xxx'} - self.assertRaises(DistutilsSetupError, self._run, - {}, **{'strict': 1, - 'restructuredtext': 1}) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(CheckTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_clean.py b/Lib/distutils/tests/test_clean.py deleted file mode 100644 index 92367499cefc04..00000000000000 --- a/Lib/distutils/tests/test_clean.py +++ /dev/null @@ -1,49 +0,0 @@ -"""Tests for distutils.command.clean.""" -import os -import unittest - -from distutils.command.clean import clean -from distutils.tests import support -from test.support import run_unittest - -class cleanTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - - def test_simple_run(self): - pkg_dir, dist = self.create_dist() - cmd = clean(dist) - - # let's add some elements clean should remove - dirs = [(d, os.path.join(pkg_dir, d)) - for d in ('build_temp', 'build_lib', 'bdist_base', - 'build_scripts', 'build_base')] - - for name, path in dirs: - os.mkdir(path) - setattr(cmd, name, path) - if name == 'build_base': - continue - for f in ('one', 'two', 'three'): - self.write_file(os.path.join(path, f)) - - # let's run the command - cmd.all = 1 - cmd.ensure_finalized() - cmd.run() - - # make sure the files where removed - for name, path in dirs: - self.assertFalse(os.path.exists(path), - '%s was not removed' % path) - - # let's run the command again (should spit warnings but succeed) - cmd.all = 1 - cmd.ensure_finalized() - cmd.run() - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(cleanTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_cmd.py b/Lib/distutils/tests/test_cmd.py deleted file mode 100644 index 2319214a9e332b..00000000000000 --- a/Lib/distutils/tests/test_cmd.py +++ /dev/null @@ -1,126 +0,0 @@ -"""Tests for distutils.cmd.""" -import unittest -import os -from test.support import captured_stdout, run_unittest - -from distutils.cmd import Command -from distutils.dist import Distribution -from distutils.errors import DistutilsOptionError -from distutils import debug - -class MyCmd(Command): - def initialize_options(self): - pass - -class CommandTestCase(unittest.TestCase): - - def setUp(self): - dist = Distribution() - self.cmd = MyCmd(dist) - - def test_ensure_string_list(self): - - cmd = self.cmd - cmd.not_string_list = ['one', 2, 'three'] - cmd.yes_string_list = ['one', 'two', 'three'] - cmd.not_string_list2 = object() - cmd.yes_string_list2 = 'ok' - cmd.ensure_string_list('yes_string_list') - cmd.ensure_string_list('yes_string_list2') - - self.assertRaises(DistutilsOptionError, - cmd.ensure_string_list, 'not_string_list') - - self.assertRaises(DistutilsOptionError, - cmd.ensure_string_list, 'not_string_list2') - - cmd.option1 = 'ok,dok' - cmd.ensure_string_list('option1') - self.assertEqual(cmd.option1, ['ok', 'dok']) - - cmd.option2 = ['xxx', 'www'] - cmd.ensure_string_list('option2') - - cmd.option3 = ['ok', 2] - self.assertRaises(DistutilsOptionError, cmd.ensure_string_list, - 'option3') - - - def test_make_file(self): - - cmd = self.cmd - - # making sure it raises when infiles is not a string or a list/tuple - self.assertRaises(TypeError, cmd.make_file, - infiles=1, outfile='', func='func', args=()) - - # making sure execute gets called properly - def _execute(func, args, exec_msg, level): - self.assertEqual(exec_msg, 'generating out 
from in') - cmd.force = True - cmd.execute = _execute - cmd.make_file(infiles='in', outfile='out', func='func', args=()) - - def test_dump_options(self): - - msgs = [] - def _announce(msg, level): - msgs.append(msg) - cmd = self.cmd - cmd.announce = _announce - cmd.option1 = 1 - cmd.option2 = 1 - cmd.user_options = [('option1', '', ''), ('option2', '', '')] - cmd.dump_options() - - wanted = ["command options for 'MyCmd':", ' option1 = 1', - ' option2 = 1'] - self.assertEqual(msgs, wanted) - - def test_ensure_string(self): - cmd = self.cmd - cmd.option1 = 'ok' - cmd.ensure_string('option1') - - cmd.option2 = None - cmd.ensure_string('option2', 'xxx') - self.assertTrue(hasattr(cmd, 'option2')) - - cmd.option3 = 1 - self.assertRaises(DistutilsOptionError, cmd.ensure_string, 'option3') - - def test_ensure_filename(self): - cmd = self.cmd - cmd.option1 = __file__ - cmd.ensure_filename('option1') - cmd.option2 = 'xxx' - self.assertRaises(DistutilsOptionError, cmd.ensure_filename, 'option2') - - def test_ensure_dirname(self): - cmd = self.cmd - cmd.option1 = os.path.dirname(__file__) or os.curdir - cmd.ensure_dirname('option1') - cmd.option2 = 'xxx' - self.assertRaises(DistutilsOptionError, cmd.ensure_dirname, 'option2') - - def test_debug_print(self): - cmd = self.cmd - with captured_stdout() as stdout: - cmd.debug_print('xxx') - stdout.seek(0) - self.assertEqual(stdout.read(), '') - - debug.DEBUG = True - try: - with captured_stdout() as stdout: - cmd.debug_print('xxx') - stdout.seek(0) - self.assertEqual(stdout.read(), 'xxx\n') - finally: - debug.DEBUG = False - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(CommandTestCase) - -if __name__ == '__main__': - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_config.py b/Lib/distutils/tests/test_config.py deleted file mode 100644 index 8ab70efb161cbd..00000000000000 --- a/Lib/distutils/tests/test_config.py +++ /dev/null @@ -1,141 +0,0 @@ -"""Tests for distutils.pypirc.pypirc.""" -import os -import unittest - -from distutils.core import PyPIRCCommand -from distutils.core import Distribution -from distutils.log import set_threshold -from distutils.log import WARN - -from distutils.tests import support -from test.support import run_unittest - -PYPIRC = """\ -[distutils] - -index-servers = - server1 - server2 - server3 - -[server1] -username:me -password:secret - -[server2] -username:meagain -password: secret -realm:acme -repository:http://another.pypi/ - -[server3] -username:cbiggles -password:yh^%#rest-of-my-password -""" - -PYPIRC_OLD = """\ -[server-login] -username:tarek -password:secret -""" - -WANTED = """\ -[distutils] -index-servers = - pypi - -[pypi] -username:tarek -password:xxx -""" - - -class BasePyPIRCCommandTestCase(support.TempdirManager, - support.LoggingSilencer, - support.EnvironGuard, - unittest.TestCase): - - def setUp(self): - """Patches the environment.""" - super(BasePyPIRCCommandTestCase, self).setUp() - self.tmp_dir = self.mkdtemp() - os.environ['HOME'] = self.tmp_dir - os.environ['USERPROFILE'] = self.tmp_dir - self.rc = os.path.join(self.tmp_dir, '.pypirc') - self.dist = Distribution() - - class command(PyPIRCCommand): - def __init__(self, dist): - PyPIRCCommand.__init__(self, dist) - def initialize_options(self): - pass - finalize_options = initialize_options - - self._cmd = command - self.old_threshold = set_threshold(WARN) - - def tearDown(self): - """Removes the patch.""" - set_threshold(self.old_threshold) - super(BasePyPIRCCommandTestCase, self).tearDown() - - -class 
PyPIRCCommandTestCase(BasePyPIRCCommandTestCase): - - def test_server_registration(self): - # This test makes sure PyPIRCCommand knows how to: - # 1. handle several sections in .pypirc - # 2. handle the old format - - # new format - self.write_file(self.rc, PYPIRC) - cmd = self._cmd(self.dist) - config = cmd._read_pypirc() - - config = list(sorted(config.items())) - waited = [('password', 'secret'), ('realm', 'pypi'), - ('repository', 'https://upload.pypi.org/legacy/'), - ('server', 'server1'), ('username', 'me')] - self.assertEqual(config, waited) - - # old format - self.write_file(self.rc, PYPIRC_OLD) - config = cmd._read_pypirc() - config = list(sorted(config.items())) - waited = [('password', 'secret'), ('realm', 'pypi'), - ('repository', 'https://upload.pypi.org/legacy/'), - ('server', 'server-login'), ('username', 'tarek')] - self.assertEqual(config, waited) - - def test_server_empty_registration(self): - cmd = self._cmd(self.dist) - rc = cmd._get_rc_file() - self.assertFalse(os.path.exists(rc)) - cmd._store_pypirc('tarek', 'xxx') - self.assertTrue(os.path.exists(rc)) - f = open(rc) - try: - content = f.read() - self.assertEqual(content, WANTED) - finally: - f.close() - - def test_config_interpolation(self): - # using the % character in .pypirc should not raise an error (#20120) - self.write_file(self.rc, PYPIRC) - cmd = self._cmd(self.dist) - cmd.repository = 'server3' - config = cmd._read_pypirc() - - config = list(sorted(config.items())) - waited = [('password', 'yh^%#rest-of-my-password'), ('realm', 'pypi'), - ('repository', 'https://upload.pypi.org/legacy/'), - ('server', 'server3'), ('username', 'cbiggles')] - self.assertEqual(config, waited) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(PyPIRCCommandTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_config_cmd.py b/Lib/distutils/tests/test_config_cmd.py deleted file mode 100644 index c79db68aae115d..00000000000000 --- a/Lib/distutils/tests/test_config_cmd.py +++ /dev/null @@ -1,103 +0,0 @@ -"""Tests for distutils.command.config.""" -import unittest -import os -import sys -import sysconfig -from test.support import ( - run_unittest, missing_compiler_executable, requires_subprocess -) - -from distutils.command.config import dump_file, config -from distutils.tests import support -from distutils import log - -class ConfigTestCase(support.LoggingSilencer, - support.TempdirManager, - unittest.TestCase): - - def _info(self, msg, *args): - for line in msg.splitlines(): - self._logs.append(line) - - def setUp(self): - super(ConfigTestCase, self).setUp() - self._logs = [] - self.old_log = log.info - log.info = self._info - self.old_config_vars = dict(sysconfig._CONFIG_VARS) - - def tearDown(self): - log.info = self.old_log - sysconfig._CONFIG_VARS.clear() - sysconfig._CONFIG_VARS.update(self.old_config_vars) - super(ConfigTestCase, self).tearDown() - - def test_dump_file(self): - this_file = os.path.splitext(__file__)[0] + '.py' - f = open(this_file) - try: - numlines = len(f.readlines()) - finally: - f.close() - - dump_file(this_file, 'I am the header') - self.assertEqual(len(self._logs), numlines+1) - - @unittest.skipIf(sys.platform == 'win32', "can't test on Windows") - @requires_subprocess() - def test_search_cpp(self): - cmd = missing_compiler_executable(['preprocessor']) - if cmd is not None: - self.skipTest('The %r command is not found' % cmd) - pkg_dir, dist = self.create_dist() - cmd = config(dist) - cmd._check_compiler() - compiler = 
cmd.compiler - if sys.platform[:3] == "aix" and "xlc" in compiler.preprocessor[0].lower(): - self.skipTest('xlc: The -E option overrides the -P, -o, and -qsyntaxonly options') - - # simple pattern searches - match = cmd.search_cpp(pattern='xxx', body='/* xxx */') - self.assertEqual(match, 0) - - match = cmd.search_cpp(pattern='_configtest', body='/* xxx */') - self.assertEqual(match, 1) - - def test_finalize_options(self): - # finalize_options does a bit of transformation - # on options - pkg_dir, dist = self.create_dist() - cmd = config(dist) - cmd.include_dirs = 'one%stwo' % os.pathsep - cmd.libraries = 'one' - cmd.library_dirs = 'three%sfour' % os.pathsep - cmd.ensure_finalized() - - self.assertEqual(cmd.include_dirs, ['one', 'two']) - self.assertEqual(cmd.libraries, ['one']) - self.assertEqual(cmd.library_dirs, ['three', 'four']) - - def test_clean(self): - # _clean removes files - tmp_dir = self.mkdtemp() - f1 = os.path.join(tmp_dir, 'one') - f2 = os.path.join(tmp_dir, 'two') - - self.write_file(f1, 'xxx') - self.write_file(f2, 'xxx') - - for f in (f1, f2): - self.assertTrue(os.path.exists(f)) - - pkg_dir, dist = self.create_dist() - cmd = config(dist) - cmd._clean(f1, f2) - - for f in (f1, f2): - self.assertFalse(os.path.exists(f)) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(ConfigTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_core.py b/Lib/distutils/tests/test_core.py deleted file mode 100644 index 700a22da045d45..00000000000000 --- a/Lib/distutils/tests/test_core.py +++ /dev/null @@ -1,140 +0,0 @@ -"""Tests for distutils.core.""" - -import io -import distutils.core -import os -import shutil -import sys -from test.support import captured_stdout, run_unittest -from test.support import os_helper -import unittest -from distutils.tests import support -from distutils import log - -# setup script that uses __file__ -setup_using___file__ = """\ - -__file__ - -from distutils.core import setup -setup() -""" - -setup_prints_cwd = """\ - -import os -print(os.getcwd()) - -from distutils.core import setup -setup() -""" - -setup_does_nothing = """\ -from distutils.core import setup -setup() -""" - - -setup_defines_subclass = """\ -from distutils.core import setup -from distutils.command.install import install as _install - -class install(_install): - sub_commands = _install.sub_commands + ['cmd'] - -setup(cmdclass={'install': install}) -""" - -class CoreTestCase(support.EnvironGuard, unittest.TestCase): - - def setUp(self): - super(CoreTestCase, self).setUp() - self.old_stdout = sys.stdout - self.cleanup_testfn() - self.old_argv = sys.argv, sys.argv[:] - self.addCleanup(log.set_threshold, log._global_log.threshold) - - def tearDown(self): - sys.stdout = self.old_stdout - self.cleanup_testfn() - sys.argv = self.old_argv[0] - sys.argv[:] = self.old_argv[1] - super(CoreTestCase, self).tearDown() - - def cleanup_testfn(self): - path = os_helper.TESTFN - if os.path.isfile(path): - os.remove(path) - elif os.path.isdir(path): - shutil.rmtree(path) - - def write_setup(self, text, path=os_helper.TESTFN): - f = open(path, "w") - try: - f.write(text) - finally: - f.close() - return path - - def test_run_setup_provides_file(self): - # Make sure the script can use __file__; if that's missing, the test - # setup.py script will raise NameError. 
- distutils.core.run_setup( - self.write_setup(setup_using___file__)) - - def test_run_setup_preserves_sys_argv(self): - # Make sure run_setup does not clobber sys.argv - argv_copy = sys.argv.copy() - distutils.core.run_setup( - self.write_setup(setup_does_nothing)) - self.assertEqual(sys.argv, argv_copy) - - def test_run_setup_defines_subclass(self): - # Make sure the script can use __file__; if that's missing, the test - # setup.py script will raise NameError. - dist = distutils.core.run_setup( - self.write_setup(setup_defines_subclass)) - install = dist.get_command_obj('install') - self.assertIn('cmd', install.sub_commands) - - def test_run_setup_uses_current_dir(self): - # This tests that the setup script is run with the current directory - # as its own current directory; this was temporarily broken by a - # previous patch when TESTFN did not use the current directory. - sys.stdout = io.StringIO() - cwd = os.getcwd() - - # Create a directory and write the setup.py file there: - os.mkdir(os_helper.TESTFN) - setup_py = os.path.join(os_helper.TESTFN, "setup.py") - distutils.core.run_setup( - self.write_setup(setup_prints_cwd, path=setup_py)) - - output = sys.stdout.getvalue() - if output.endswith("\n"): - output = output[:-1] - self.assertEqual(cwd, output) - - def test_debug_mode(self): - # this covers the code called when DEBUG is set - sys.argv = ['setup.py', '--name'] - with captured_stdout() as stdout: - distutils.core.setup(name='bar') - stdout.seek(0) - self.assertEqual(stdout.read(), 'bar\n') - - distutils.core.DEBUG = True - try: - with captured_stdout() as stdout: - distutils.core.setup(name='bar') - finally: - distutils.core.DEBUG = False - stdout.seek(0) - wanted = "options (after parsing config files):\n" - self.assertEqual(stdout.readlines()[0], wanted) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(CoreTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_cygwinccompiler.py b/Lib/distutils/tests/test_cygwinccompiler.py deleted file mode 100644 index 0912ffd15c8ee9..00000000000000 --- a/Lib/distutils/tests/test_cygwinccompiler.py +++ /dev/null @@ -1,154 +0,0 @@ -"""Tests for distutils.cygwinccompiler.""" -import unittest -import sys -import os -from io import BytesIO -from test.support import run_unittest - -from distutils import cygwinccompiler -from distutils.cygwinccompiler import (check_config_h, - CONFIG_H_OK, CONFIG_H_NOTOK, - CONFIG_H_UNCERTAIN, get_versions, - get_msvcr) -from distutils.tests import support - -class FakePopen(object): - test_class = None - - def __init__(self, cmd, shell, stdout): - self.cmd = cmd.split()[0] - exes = self.test_class._exes - if self.cmd in exes: - # issue #6438 in Python 3.x, Popen returns bytes - self.stdout = BytesIO(exes[self.cmd]) - else: - self.stdout = os.popen(cmd, 'r') - - -class CygwinCCompilerTestCase(support.TempdirManager, - unittest.TestCase): - - def setUp(self): - super(CygwinCCompilerTestCase, self).setUp() - self.version = sys.version - self.python_h = os.path.join(self.mkdtemp(), 'python.h') - from distutils import sysconfig - self.old_get_config_h_filename = sysconfig.get_config_h_filename - sysconfig.get_config_h_filename = self._get_config_h_filename - self.old_find_executable = cygwinccompiler.find_executable - cygwinccompiler.find_executable = self._find_executable - self._exes = {} - self.old_popen = cygwinccompiler.Popen - FakePopen.test_class = self - cygwinccompiler.Popen = FakePopen - - def tearDown(self): - sys.version = 
self.version - from distutils import sysconfig - sysconfig.get_config_h_filename = self.old_get_config_h_filename - cygwinccompiler.find_executable = self.old_find_executable - cygwinccompiler.Popen = self.old_popen - super(CygwinCCompilerTestCase, self).tearDown() - - def _get_config_h_filename(self): - return self.python_h - - def _find_executable(self, name): - if name in self._exes: - return name - return None - - def test_check_config_h(self): - - # check_config_h looks for "GCC" in sys.version first - # returns CONFIG_H_OK if found - sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) \n[GCC ' - '4.0.1 (Apple Computer, Inc. build 5370)]') - - self.assertEqual(check_config_h()[0], CONFIG_H_OK) - - # then it tries to see if it can find "__GNUC__" in pyconfig.h - sys.version = 'something without the *CC word' - - # if the file doesn't exist it returns CONFIG_H_UNCERTAIN - self.assertEqual(check_config_h()[0], CONFIG_H_UNCERTAIN) - - # if it exists but does not contain __GNUC__, it returns CONFIG_H_NOTOK - self.write_file(self.python_h, 'xxx') - self.assertEqual(check_config_h()[0], CONFIG_H_NOTOK) - - # and CONFIG_H_OK if __GNUC__ is found - self.write_file(self.python_h, 'xxx __GNUC__ xxx') - self.assertEqual(check_config_h()[0], CONFIG_H_OK) - - def test_get_versions(self): - - # get_versions calls distutils.spawn.find_executable on - # 'gcc', 'ld' and 'dllwrap' - self.assertEqual(get_versions(), (None, None, None)) - - # Let's fake we have 'gcc' and it returns '3.4.5' - self._exes['gcc'] = b'gcc (GCC) 3.4.5 (mingw special)\nFSF' - res = get_versions() - self.assertEqual(str(res[0]), '3.4.5') - - # and let's see what happens when the version - # doesn't match the regular expression - # (\d+\.\d+(\.\d+)*) - self._exes['gcc'] = b'very strange output' - res = get_versions() - self.assertEqual(res[0], None) - - # same thing for ld - self._exes['ld'] = b'GNU ld version 2.17.50 20060824' - res = get_versions() - self.assertEqual(str(res[1]), '2.17.50') - self._exes['ld'] = b'@(#)PROGRAM:ld PROJECT:ld64-77' - res = get_versions() - self.assertEqual(res[1], None) - - # and dllwrap - self._exes['dllwrap'] = b'GNU dllwrap 2.17.50 20060824\nFSF' - res = get_versions() - self.assertEqual(str(res[2]), '2.17.50') - self._exes['dllwrap'] = b'Cheese Wrap' - res = get_versions() - self.assertEqual(res[2], None) - - def test_get_msvcr(self): - - # none - sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) ' - '\n[GCC 4.0.1 (Apple Computer, Inc. 
build 5370)]') - self.assertEqual(get_msvcr(), None) - - # MSVC 7.0 - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1300 32 bits (Intel)]') - self.assertEqual(get_msvcr(), ['msvcr70']) - - # MSVC 7.1 - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1310 32 bits (Intel)]') - self.assertEqual(get_msvcr(), ['msvcr71']) - - # VS2005 / MSVC 8.0 - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1400 32 bits (Intel)]') - self.assertEqual(get_msvcr(), ['msvcr80']) - - # VS2008 / MSVC 9.0 - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1500 32 bits (Intel)]') - self.assertEqual(get_msvcr(), ['msvcr90']) - - # unknown - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1999 32 bits (Intel)]') - self.assertRaises(ValueError, get_msvcr) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(CygwinCCompilerTestCase) - -if __name__ == '__main__': - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_dep_util.py b/Lib/distutils/tests/test_dep_util.py deleted file mode 100644 index 0d52740a9edda3..00000000000000 --- a/Lib/distutils/tests/test_dep_util.py +++ /dev/null @@ -1,80 +0,0 @@ -"""Tests for distutils.dep_util.""" -import unittest -import os - -from distutils.dep_util import newer, newer_pairwise, newer_group -from distutils.errors import DistutilsFileError -from distutils.tests import support -from test.support import run_unittest - -class DepUtilTestCase(support.TempdirManager, unittest.TestCase): - - def test_newer(self): - - tmpdir = self.mkdtemp() - new_file = os.path.join(tmpdir, 'new') - old_file = os.path.abspath(__file__) - - # Raise DistutilsFileError if 'new_file' does not exist. - self.assertRaises(DistutilsFileError, newer, new_file, old_file) - - # Return true if 'new_file' exists and is more recently modified than - # 'old_file', or if 'new_file' exists and 'old_file' doesn't. - self.write_file(new_file) - self.assertTrue(newer(new_file, 'I_dont_exist')) - self.assertTrue(newer(new_file, old_file)) - - # Return false if both exist and 'old_file' is the same age or younger - # than 'new_file'. - self.assertFalse(newer(old_file, new_file)) - - def test_newer_pairwise(self): - tmpdir = self.mkdtemp() - sources = os.path.join(tmpdir, 'sources') - targets = os.path.join(tmpdir, 'targets') - os.mkdir(sources) - os.mkdir(targets) - one = os.path.join(sources, 'one') - two = os.path.join(sources, 'two') - three = os.path.abspath(__file__) # I am the old file - four = os.path.join(targets, 'four') - self.write_file(one) - self.write_file(two) - self.write_file(four) - - self.assertEqual(newer_pairwise([one, two], [three, four]), - ([one],[three])) - - def test_newer_group(self): - tmpdir = self.mkdtemp() - sources = os.path.join(tmpdir, 'sources') - os.mkdir(sources) - one = os.path.join(sources, 'one') - two = os.path.join(sources, 'two') - three = os.path.join(sources, 'three') - old_file = os.path.abspath(__file__) - - # return true if 'old_file' is out-of-date with respect to any file - # listed in 'sources'. 
- self.write_file(one) - self.write_file(two) - self.write_file(three) - self.assertTrue(newer_group([one, two, three], old_file)) - self.assertFalse(newer_group([one, two, old_file], three)) - - # missing handling - os.remove(one) - self.assertRaises(OSError, newer_group, [one, two, old_file], three) - - self.assertFalse(newer_group([one, two, old_file], three, - missing='ignore')) - - self.assertTrue(newer_group([one, two, old_file], three, - missing='newer')) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(DepUtilTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_dir_util.py b/Lib/distutils/tests/test_dir_util.py deleted file mode 100644 index ebd89f320dca89..00000000000000 --- a/Lib/distutils/tests/test_dir_util.py +++ /dev/null @@ -1,143 +0,0 @@ -"""Tests for distutils.dir_util.""" -import unittest -import os -import stat -import sys -from unittest.mock import patch - -from distutils import dir_util, errors -from distutils.dir_util import (mkpath, remove_tree, create_tree, copy_tree, - ensure_relative) - -from distutils import log -from distutils.tests import support -from test.support import run_unittest, is_emscripten, is_wasi - - -class DirUtilTestCase(support.TempdirManager, unittest.TestCase): - - def _log(self, msg, *args): - if len(args) > 0: - self._logs.append(msg % args) - else: - self._logs.append(msg) - - def setUp(self): - super(DirUtilTestCase, self).setUp() - self._logs = [] - tmp_dir = self.mkdtemp() - self.root_target = os.path.join(tmp_dir, 'deep') - self.target = os.path.join(self.root_target, 'here') - self.target2 = os.path.join(tmp_dir, 'deep2') - self.old_log = log.info - log.info = self._log - - def tearDown(self): - log.info = self.old_log - super(DirUtilTestCase, self).tearDown() - - def test_mkpath_remove_tree_verbosity(self): - - mkpath(self.target, verbose=0) - wanted = [] - self.assertEqual(self._logs, wanted) - remove_tree(self.root_target, verbose=0) - - mkpath(self.target, verbose=1) - wanted = ['creating %s' % self.root_target, - 'creating %s' % self.target] - self.assertEqual(self._logs, wanted) - self._logs = [] - - remove_tree(self.root_target, verbose=1) - wanted = ["removing '%s' (and everything under it)" % self.root_target] - self.assertEqual(self._logs, wanted) - - @unittest.skipIf(sys.platform.startswith('win'), - "This test is only appropriate for POSIX-like systems.") - @unittest.skipIf( - is_emscripten or is_wasi, - "Emscripten's/WASI's umask is a stub." - ) - def test_mkpath_with_custom_mode(self): - # Get and set the current umask value for testing mode bits. 
- umask = os.umask(0o002) - os.umask(umask) - mkpath(self.target, 0o700) - self.assertEqual( - stat.S_IMODE(os.stat(self.target).st_mode), 0o700 & ~umask) - mkpath(self.target2, 0o555) - self.assertEqual( - stat.S_IMODE(os.stat(self.target2).st_mode), 0o555 & ~umask) - - def test_create_tree_verbosity(self): - - create_tree(self.root_target, ['one', 'two', 'three'], verbose=0) - self.assertEqual(self._logs, []) - remove_tree(self.root_target, verbose=0) - - wanted = ['creating %s' % self.root_target] - create_tree(self.root_target, ['one', 'two', 'three'], verbose=1) - self.assertEqual(self._logs, wanted) - - remove_tree(self.root_target, verbose=0) - - def test_copy_tree_verbosity(self): - - mkpath(self.target, verbose=0) - - copy_tree(self.target, self.target2, verbose=0) - self.assertEqual(self._logs, []) - - remove_tree(self.root_target, verbose=0) - - mkpath(self.target, verbose=0) - a_file = os.path.join(self.target, 'ok.txt') - with open(a_file, 'w') as f: - f.write('some content') - - wanted = ['copying %s -> %s' % (a_file, self.target2)] - copy_tree(self.target, self.target2, verbose=1) - self.assertEqual(self._logs, wanted) - - remove_tree(self.root_target, verbose=0) - remove_tree(self.target2, verbose=0) - - def test_copy_tree_skips_nfs_temp_files(self): - mkpath(self.target, verbose=0) - - a_file = os.path.join(self.target, 'ok.txt') - nfs_file = os.path.join(self.target, '.nfs123abc') - for f in a_file, nfs_file: - with open(f, 'w') as fh: - fh.write('some content') - - copy_tree(self.target, self.target2) - self.assertEqual(os.listdir(self.target2), ['ok.txt']) - - remove_tree(self.root_target, verbose=0) - remove_tree(self.target2, verbose=0) - - def test_ensure_relative(self): - if os.sep == '/': - self.assertEqual(ensure_relative('/home/foo'), 'home/foo') - self.assertEqual(ensure_relative('some/path'), 'some/path') - else: # \\ - self.assertEqual(ensure_relative('c:\\home\\foo'), 'c:home\\foo') - self.assertEqual(ensure_relative('home\\foo'), 'home\\foo') - - def test_copy_tree_exception_in_listdir(self): - """ - An exception in listdir should raise a DistutilsFileError - """ - with patch("os.listdir", side_effect=OSError()), \ - self.assertRaises(errors.DistutilsFileError): - src = self.tempdirs[-1] - dir_util.copy_tree(src, None) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(DirUtilTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_dist.py b/Lib/distutils/tests/test_dist.py deleted file mode 100644 index 2ef70d987f36bb..00000000000000 --- a/Lib/distutils/tests/test_dist.py +++ /dev/null @@ -1,529 +0,0 @@ -"""Tests for distutils.dist.""" -import os -import io -import sys -import unittest -import warnings -import textwrap - -from unittest import mock - -from distutils.dist import Distribution, fix_help_options -from distutils.cmd import Command - -from test.support import ( - captured_stdout, captured_stderr, run_unittest -) -from test.support.os_helper import TESTFN -from distutils.tests import support -from distutils import log - - -class test_dist(Command): - """Sample distutils extension command.""" - - user_options = [ - ("sample-option=", "S", "help text"), - ] - - def initialize_options(self): - self.sample_option = None - - -class TestDistribution(Distribution): - """Distribution subclasses that avoids the default search for - configuration files. - - The ._config_files attribute must be set before - .parse_config_files() is called. 
- """ - - def find_config_files(self): - return self._config_files - - -class DistributionTestCase(support.LoggingSilencer, - support.TempdirManager, - support.EnvironGuard, - unittest.TestCase): - - def setUp(self): - super(DistributionTestCase, self).setUp() - self.argv = sys.argv, sys.argv[:] - del sys.argv[1:] - - def tearDown(self): - sys.argv = self.argv[0] - sys.argv[:] = self.argv[1] - super(DistributionTestCase, self).tearDown() - - def create_distribution(self, configfiles=()): - d = TestDistribution() - d._config_files = configfiles - d.parse_config_files() - d.parse_command_line() - return d - - def test_command_packages_unspecified(self): - sys.argv.append("build") - d = self.create_distribution() - self.assertEqual(d.get_command_packages(), ["distutils.command"]) - - def test_command_packages_cmdline(self): - from distutils.tests.test_dist import test_dist - sys.argv.extend(["--command-packages", - "foo.bar,distutils.tests", - "test_dist", - "-Ssometext", - ]) - d = self.create_distribution() - # let's actually try to load our test command: - self.assertEqual(d.get_command_packages(), - ["distutils.command", "foo.bar", "distutils.tests"]) - cmd = d.get_command_obj("test_dist") - self.assertIsInstance(cmd, test_dist) - self.assertEqual(cmd.sample_option, "sometext") - - def test_venv_install_options(self): - sys.argv.append("install") - self.addCleanup(os.unlink, TESTFN) - - fakepath = '/somedir' - - with open(TESTFN, "w") as f: - print(("[install]\n" - "install-base = {0}\n" - "install-platbase = {0}\n" - "install-lib = {0}\n" - "install-platlib = {0}\n" - "install-purelib = {0}\n" - "install-headers = {0}\n" - "install-scripts = {0}\n" - "install-data = {0}\n" - "prefix = {0}\n" - "exec-prefix = {0}\n" - "home = {0}\n" - "user = {0}\n" - "root = {0}").format(fakepath), file=f) - - # Base case: Not in a Virtual Environment - with mock.patch.multiple(sys, prefix='/a', base_prefix='/a') as values: - d = self.create_distribution([TESTFN]) - - option_tuple = (TESTFN, fakepath) - - result_dict = { - 'install_base': option_tuple, - 'install_platbase': option_tuple, - 'install_lib': option_tuple, - 'install_platlib': option_tuple, - 'install_purelib': option_tuple, - 'install_headers': option_tuple, - 'install_scripts': option_tuple, - 'install_data': option_tuple, - 'prefix': option_tuple, - 'exec_prefix': option_tuple, - 'home': option_tuple, - 'user': option_tuple, - 'root': option_tuple, - } - - self.assertEqual( - sorted(d.command_options.get('install').keys()), - sorted(result_dict.keys())) - - for (key, value) in d.command_options.get('install').items(): - self.assertEqual(value, result_dict[key]) - - # Test case: In a Virtual Environment - with mock.patch.multiple(sys, prefix='/a', base_prefix='/b') as values: - d = self.create_distribution([TESTFN]) - - for key in result_dict.keys(): - self.assertNotIn(key, d.command_options.get('install', {})) - - def test_command_packages_configfile(self): - sys.argv.append("build") - self.addCleanup(os.unlink, TESTFN) - f = open(TESTFN, "w") - try: - print("[global]", file=f) - print("command_packages = foo.bar, splat", file=f) - finally: - f.close() - - d = self.create_distribution([TESTFN]) - self.assertEqual(d.get_command_packages(), - ["distutils.command", "foo.bar", "splat"]) - - # ensure command line overrides config: - sys.argv[1:] = ["--command-packages", "spork", "build"] - d = self.create_distribution([TESTFN]) - self.assertEqual(d.get_command_packages(), - ["distutils.command", "spork"]) - - # Setting --command-packages to '' 
should cause the default to - # be used even if a config file specified something else: - sys.argv[1:] = ["--command-packages", "", "build"] - d = self.create_distribution([TESTFN]) - self.assertEqual(d.get_command_packages(), ["distutils.command"]) - - def test_empty_options(self): - # an empty options dictionary should not stay in the - # list of attributes - - # catching warnings - warns = [] - - def _warn(msg): - warns.append(msg) - - self.addCleanup(setattr, warnings, 'warn', warnings.warn) - warnings.warn = _warn - dist = Distribution(attrs={'author': 'xxx', 'name': 'xxx', - 'version': 'xxx', 'url': 'xxxx', - 'options': {}}) - - self.assertEqual(len(warns), 0) - self.assertNotIn('options', dir(dist)) - - def test_finalize_options(self): - attrs = {'keywords': 'one,two', - 'platforms': 'one,two'} - - dist = Distribution(attrs=attrs) - dist.finalize_options() - - # finalize_options splits platforms and keywords - self.assertEqual(dist.metadata.platforms, ['one', 'two']) - self.assertEqual(dist.metadata.keywords, ['one', 'two']) - - attrs = {'keywords': 'foo bar', - 'platforms': 'foo bar'} - dist = Distribution(attrs=attrs) - dist.finalize_options() - self.assertEqual(dist.metadata.platforms, ['foo bar']) - self.assertEqual(dist.metadata.keywords, ['foo bar']) - - def test_get_command_packages(self): - dist = Distribution() - self.assertEqual(dist.command_packages, None) - cmds = dist.get_command_packages() - self.assertEqual(cmds, ['distutils.command']) - self.assertEqual(dist.command_packages, - ['distutils.command']) - - dist.command_packages = 'one,two' - cmds = dist.get_command_packages() - self.assertEqual(cmds, ['distutils.command', 'one', 'two']) - - def test_announce(self): - # make sure the level is known - dist = Distribution() - args = ('ok',) - kwargs = {'level': 'ok2'} - self.assertRaises(ValueError, dist.announce, args, kwargs) - - - def test_find_config_files_disable(self): - # Ticket #1180: Allow user to disable their home config file.
- temp_home = self.mkdtemp() - if os.name == 'posix': - user_filename = os.path.join(temp_home, ".pydistutils.cfg") - else: - user_filename = os.path.join(temp_home, "pydistutils.cfg") - - with open(user_filename, 'w') as f: - f.write('[distutils]\n') - - def _expander(path): - return temp_home - - old_expander = os.path.expanduser - os.path.expanduser = _expander - try: - d = Distribution() - all_files = d.find_config_files() - - d = Distribution(attrs={'script_args': ['--no-user-cfg']}) - files = d.find_config_files() - finally: - os.path.expanduser = old_expander - - # make sure --no-user-cfg disables the user cfg file - self.assertEqual(len(all_files)-1, len(files)) - -class MetadataTestCase(support.TempdirManager, support.EnvironGuard, - unittest.TestCase): - - def setUp(self): - super(MetadataTestCase, self).setUp() - self.argv = sys.argv, sys.argv[:] - - def tearDown(self): - sys.argv = self.argv[0] - sys.argv[:] = self.argv[1] - super(MetadataTestCase, self).tearDown() - - def format_metadata(self, dist): - sio = io.StringIO() - dist.metadata.write_pkg_file(sio) - return sio.getvalue() - - def test_simple_metadata(self): - attrs = {"name": "package", - "version": "1.0"} - dist = Distribution(attrs) - meta = self.format_metadata(dist) - self.assertIn("Metadata-Version: 1.0", meta) - self.assertNotIn("provides:", meta.lower()) - self.assertNotIn("requires:", meta.lower()) - self.assertNotIn("obsoletes:", meta.lower()) - - def test_provides(self): - attrs = {"name": "package", - "version": "1.0", - "provides": ["package", "package.sub"]} - dist = Distribution(attrs) - self.assertEqual(dist.metadata.get_provides(), - ["package", "package.sub"]) - self.assertEqual(dist.get_provides(), - ["package", "package.sub"]) - meta = self.format_metadata(dist) - self.assertIn("Metadata-Version: 1.1", meta) - self.assertNotIn("requires:", meta.lower()) - self.assertNotIn("obsoletes:", meta.lower()) - - def test_provides_illegal(self): - self.assertRaises(ValueError, Distribution, - {"name": "package", - "version": "1.0", - "provides": ["my.pkg (splat)"]}) - - def test_requires(self): - attrs = {"name": "package", - "version": "1.0", - "requires": ["other", "another (==1.0)"]} - dist = Distribution(attrs) - self.assertEqual(dist.metadata.get_requires(), - ["other", "another (==1.0)"]) - self.assertEqual(dist.get_requires(), - ["other", "another (==1.0)"]) - meta = self.format_metadata(dist) - self.assertIn("Metadata-Version: 1.1", meta) - self.assertNotIn("provides:", meta.lower()) - self.assertIn("Requires: other", meta) - self.assertIn("Requires: another (==1.0)", meta) - self.assertNotIn("obsoletes:", meta.lower()) - - def test_requires_illegal(self): - self.assertRaises(ValueError, Distribution, - {"name": "package", - "version": "1.0", - "requires": ["my.pkg (splat)"]}) - - def test_requires_to_list(self): - attrs = {"name": "package", - "requires": iter(["other"])} - dist = Distribution(attrs) - self.assertIsInstance(dist.metadata.requires, list) - - - def test_obsoletes(self): - attrs = {"name": "package", - "version": "1.0", - "obsoletes": ["other", "another (<1.0)"]} - dist = Distribution(attrs) - self.assertEqual(dist.metadata.get_obsoletes(), - ["other", "another (<1.0)"]) - self.assertEqual(dist.get_obsoletes(), - ["other", "another (<1.0)"]) - meta = self.format_metadata(dist) - self.assertIn("Metadata-Version: 1.1", meta) - self.assertNotIn("provides:", meta.lower()) - self.assertNotIn("requires:", meta.lower()) - self.assertIn("Obsoletes: other", meta) - self.assertIn("Obsoletes: 
another (<1.0)", meta) - - def test_obsoletes_illegal(self): - self.assertRaises(ValueError, Distribution, - {"name": "package", - "version": "1.0", - "obsoletes": ["my.pkg (splat)"]}) - - def test_obsoletes_to_list(self): - attrs = {"name": "package", - "obsoletes": iter(["other"])} - dist = Distribution(attrs) - self.assertIsInstance(dist.metadata.obsoletes, list) - - def test_classifier(self): - attrs = {'name': 'Boa', 'version': '3.0', - 'classifiers': ['Programming Language :: Python :: 3']} - dist = Distribution(attrs) - self.assertEqual(dist.get_classifiers(), - ['Programming Language :: Python :: 3']) - meta = self.format_metadata(dist) - self.assertIn('Metadata-Version: 1.1', meta) - - def test_classifier_invalid_type(self): - attrs = {'name': 'Boa', 'version': '3.0', - 'classifiers': ('Programming Language :: Python :: 3',)} - with captured_stderr() as error: - d = Distribution(attrs) - # should have warning about passing a non-list - self.assertIn('should be a list', error.getvalue()) - # should be converted to a list - self.assertIsInstance(d.metadata.classifiers, list) - self.assertEqual(d.metadata.classifiers, - list(attrs['classifiers'])) - - def test_keywords(self): - attrs = {'name': 'Monty', 'version': '1.0', - 'keywords': ['spam', 'eggs', 'life of brian']} - dist = Distribution(attrs) - self.assertEqual(dist.get_keywords(), - ['spam', 'eggs', 'life of brian']) - - def test_keywords_invalid_type(self): - attrs = {'name': 'Monty', 'version': '1.0', - 'keywords': ('spam', 'eggs', 'life of brian')} - with captured_stderr() as error: - d = Distribution(attrs) - # should have warning about passing a non-list - self.assertIn('should be a list', error.getvalue()) - # should be converted to a list - self.assertIsInstance(d.metadata.keywords, list) - self.assertEqual(d.metadata.keywords, list(attrs['keywords'])) - - def test_platforms(self): - attrs = {'name': 'Monty', 'version': '1.0', - 'platforms': ['GNU/Linux', 'Some Evil Platform']} - dist = Distribution(attrs) - self.assertEqual(dist.get_platforms(), - ['GNU/Linux', 'Some Evil Platform']) - - def test_platforms_invalid_types(self): - attrs = {'name': 'Monty', 'version': '1.0', - 'platforms': ('GNU/Linux', 'Some Evil Platform')} - with captured_stderr() as error: - d = Distribution(attrs) - # should have warning about passing a non-list - self.assertIn('should be a list', error.getvalue()) - # should be converted to a list - self.assertIsInstance(d.metadata.platforms, list) - self.assertEqual(d.metadata.platforms, list(attrs['platforms'])) - - def test_download_url(self): - attrs = {'name': 'Boa', 'version': '3.0', - 'download_url': 'http://example.org/boa'} - dist = Distribution(attrs) - meta = self.format_metadata(dist) - self.assertIn('Metadata-Version: 1.1', meta) - - def test_long_description(self): - long_desc = textwrap.dedent("""\ - example:: - We start here - and continue here - and end here.""") - attrs = {"name": "package", - "version": "1.0", - "long_description": long_desc} - - dist = Distribution(attrs) - meta = self.format_metadata(dist) - meta = meta.replace('\n' + 8 * ' ', '\n') - self.assertIn(long_desc, meta) - - def test_custom_pydistutils(self): - # fixes #2166 - # make sure pydistutils.cfg is found - if os.name == 'posix': - user_filename = ".pydistutils.cfg" - else: - user_filename = "pydistutils.cfg" - - temp_dir = self.mkdtemp() - user_filename = os.path.join(temp_dir, user_filename) - f = open(user_filename, 'w') - try: - f.write('.') - finally: - f.close() - - try: - dist = Distribution() - - # 
linux-style - if sys.platform in ('linux', 'darwin'): - os.environ['HOME'] = temp_dir - files = dist.find_config_files() - self.assertIn(user_filename, files) - - # win32-style - if sys.platform == 'win32': - # home drive should be found - os.environ['USERPROFILE'] = temp_dir - files = dist.find_config_files() - self.assertIn(user_filename, files, - '%r not found in %r' % (user_filename, files)) - finally: - os.remove(user_filename) - - def test_fix_help_options(self): - help_tuples = [('a', 'b', 'c', 'd'), (1, 2, 3, 4)] - fancy_options = fix_help_options(help_tuples) - self.assertEqual(fancy_options[0], ('a', 'b', 'c')) - self.assertEqual(fancy_options[1], (1, 2, 3)) - - def test_show_help(self): - # smoke test, just makes sure some help is displayed - self.addCleanup(log.set_threshold, log._global_log.threshold) - dist = Distribution() - sys.argv = [] - dist.help = 1 - dist.script_name = 'setup.py' - with captured_stdout() as s: - dist.parse_command_line() - - output = [line for line in s.getvalue().split('\n') - if line.strip() != ''] - self.assertTrue(output) - - - def test_read_metadata(self): - attrs = {"name": "package", - "version": "1.0", - "long_description": "desc", - "description": "xxx", - "download_url": "http://example.com", - "keywords": ['one', 'two'], - "requires": ['foo']} - - dist = Distribution(attrs) - metadata = dist.metadata - - # write it then reloads it - PKG_INFO = io.StringIO() - metadata.write_pkg_file(PKG_INFO) - PKG_INFO.seek(0) - metadata.read_pkg_file(PKG_INFO) - - self.assertEqual(metadata.name, "package") - self.assertEqual(metadata.version, "1.0") - self.assertEqual(metadata.description, "xxx") - self.assertEqual(metadata.download_url, 'http://example.com') - self.assertEqual(metadata.keywords, ['one', 'two']) - self.assertEqual(metadata.platforms, ['UNKNOWN']) - self.assertEqual(metadata.obsoletes, None) - self.assertEqual(metadata.requires, ['foo']) - -def test_suite(): - suite = unittest.TestSuite() - suite.addTest(unittest.TestLoader().loadTestsFromTestCase(DistributionTestCase)) - suite.addTest(unittest.TestLoader().loadTestsFromTestCase(MetadataTestCase)) - return suite - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_extension.py b/Lib/distutils/tests/test_extension.py deleted file mode 100644 index 2b08930eafb10a..00000000000000 --- a/Lib/distutils/tests/test_extension.py +++ /dev/null @@ -1,70 +0,0 @@ -"""Tests for distutils.extension.""" -import unittest -import os -import warnings - -from test.support import run_unittest -from test.support.warnings_helper import check_warnings -from distutils.extension import read_setup_file, Extension - -class ExtensionTestCase(unittest.TestCase): - - def test_read_setup_file(self): - # trying to read a Setup file - # (sample extracted from the PyGame project) - setup = os.path.join(os.path.dirname(__file__), 'Setup.sample') - - exts = read_setup_file(setup) - names = [ext.name for ext in exts] - names.sort() - - # here are the extensions read_setup_file should have created - # out of the file - wanted = ['_arraysurfarray', '_camera', '_numericsndarray', - '_numericsurfarray', 'base', 'bufferproxy', 'cdrom', - 'color', 'constants', 'display', 'draw', 'event', - 'fastevent', 'font', 'gfxdraw', 'image', 'imageext', - 'joystick', 'key', 'mask', 'mixer', 'mixer_music', - 'mouse', 'movie', 'overlay', 'pixelarray', 'pypm', - 'rect', 'rwobject', 'scrap', 'surface', 'surflock', - 'time', 'transform'] - - self.assertEqual(names, wanted) - - def test_extension_init(self): 
- # the first argument, which is the name, must be a string - self.assertRaises(AssertionError, Extension, 1, []) - ext = Extension('name', []) - self.assertEqual(ext.name, 'name') - - # the second argument, which is the list of files, must - # be a list of strings - self.assertRaises(AssertionError, Extension, 'name', 'file') - self.assertRaises(AssertionError, Extension, 'name', ['file', 1]) - ext = Extension('name', ['file1', 'file2']) - self.assertEqual(ext.sources, ['file1', 'file2']) - - # other arguments have defaults - for attr in ('include_dirs', 'define_macros', 'undef_macros', - 'library_dirs', 'libraries', 'runtime_library_dirs', - 'extra_objects', 'extra_compile_args', 'extra_link_args', - 'export_symbols', 'swig_opts', 'depends'): - self.assertEqual(getattr(ext, attr), []) - - self.assertEqual(ext.language, None) - self.assertEqual(ext.optional, None) - - # if there are unknown keyword options, warn about them - with check_warnings() as w: - warnings.simplefilter('always') - ext = Extension('name', ['file1', 'file2'], chic=True) - - self.assertEqual(len(w.warnings), 1) - self.assertEqual(str(w.warnings[0].message), - "Unknown Extension options: 'chic'") - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(ExtensionTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_file_util.py b/Lib/distutils/tests/test_file_util.py deleted file mode 100644 index 551151b0143661..00000000000000 --- a/Lib/distutils/tests/test_file_util.py +++ /dev/null @@ -1,126 +0,0 @@ -"""Tests for distutils.file_util.""" -import unittest -import os -import errno -from unittest.mock import patch - -from distutils.file_util import move_file, copy_file -from distutils import log -from distutils.tests import support -from distutils.errors import DistutilsFileError -from test.support import run_unittest -from test.support.os_helper import unlink - - -class FileUtilTestCase(support.TempdirManager, unittest.TestCase): - - def _log(self, msg, *args): - if len(args) > 0: - self._logs.append(msg % args) - else: - self._logs.append(msg) - - def setUp(self): - super(FileUtilTestCase, self).setUp() - self._logs = [] - self.old_log = log.info - log.info = self._log - tmp_dir = self.mkdtemp() - self.source = os.path.join(tmp_dir, 'f1') - self.target = os.path.join(tmp_dir, 'f2') - self.target_dir = os.path.join(tmp_dir, 'd1') - - def tearDown(self): - log.info = self.old_log - super(FileUtilTestCase, self).tearDown() - - def test_move_file_verbosity(self): - f = open(self.source, 'w') - try: - f.write('some content') - finally: - f.close() - - move_file(self.source, self.target, verbose=0) - wanted = [] - self.assertEqual(self._logs, wanted) - - # back to original state - move_file(self.target, self.source, verbose=0) - - move_file(self.source, self.target, verbose=1) - wanted = ['moving %s -> %s' % (self.source, self.target)] - self.assertEqual(self._logs, wanted) - - # back to original state - move_file(self.target, self.source, verbose=0) - - self._logs = [] - # now the target is a dir - os.mkdir(self.target_dir) - move_file(self.source, self.target_dir, verbose=1) - wanted = ['moving %s -> %s' % (self.source, self.target_dir)] - self.assertEqual(self._logs, wanted) - - def test_move_file_exception_unpacking_rename(self): - # see issue 22182 - with patch("os.rename", side_effect=OSError("wrong", 1)), \ - self.assertRaises(DistutilsFileError): - with open(self.source, 'w') as fobj: - fobj.write('spam eggs') - move_file(self.source, self.target, 
verbose=0) - - def test_move_file_exception_unpacking_unlink(self): - # see issue 22182 - with patch("os.rename", side_effect=OSError(errno.EXDEV, "wrong")), \ - patch("os.unlink", side_effect=OSError("wrong", 1)), \ - self.assertRaises(DistutilsFileError): - with open(self.source, 'w') as fobj: - fobj.write('spam eggs') - move_file(self.source, self.target, verbose=0) - - @unittest.skipUnless(hasattr(os, 'link'), 'requires os.link') - def test_copy_file_hard_link(self): - with open(self.source, 'w') as f: - f.write('some content') - # Check first that copy_file() will not fall back on copying the file - # instead of creating the hard link. - try: - os.link(self.source, self.target) - except OSError as e: - self.skipTest('os.link: %s' % e) - else: - unlink(self.target) - st = os.stat(self.source) - copy_file(self.source, self.target, link='hard') - st2 = os.stat(self.source) - st3 = os.stat(self.target) - self.assertTrue(os.path.samestat(st, st2), (st, st2)) - self.assertTrue(os.path.samestat(st2, st3), (st2, st3)) - with open(self.source, 'r') as f: - self.assertEqual(f.read(), 'some content') - - @unittest.skipUnless(hasattr(os, 'link'), 'requires os.link') - def test_copy_file_hard_link_failure(self): - # If hard linking fails, copy_file() falls back on copying file - # (some special filesystems don't support hard linking even under - # Unix, see issue #8876). - with open(self.source, 'w') as f: - f.write('some content') - st = os.stat(self.source) - with patch("os.link", side_effect=OSError(0, "linking unsupported")): - copy_file(self.source, self.target, link='hard') - st2 = os.stat(self.source) - st3 = os.stat(self.target) - self.assertTrue(os.path.samestat(st, st2), (st, st2)) - self.assertFalse(os.path.samestat(st2, st3), (st2, st3)) - for fn in (self.source, self.target): - with open(fn, 'r') as f: - self.assertEqual(f.read(), 'some content') - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(FileUtilTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_filelist.py b/Lib/distutils/tests/test_filelist.py deleted file mode 100644 index 98c97e49f80db5..00000000000000 --- a/Lib/distutils/tests/test_filelist.py +++ /dev/null @@ -1,340 +0,0 @@ -"""Tests for distutils.filelist.""" -import os -import re -import unittest -from distutils import debug -from distutils.log import WARN -from distutils.errors import DistutilsTemplateError -from distutils.filelist import glob_to_re, translate_pattern, FileList -from distutils import filelist - -from test.support import os_helper -from test.support import captured_stdout, run_unittest -from distutils.tests import support - -MANIFEST_IN = """\ -include ok -include xo -exclude xo -include foo.tmp -include buildout.cfg -global-include *.x -global-include *.txt -global-exclude *.tmp -recursive-include f *.oo -recursive-exclude global *.x -graft dir -prune dir3 -""" - - -def make_local_path(s): - """Converts '/' in a string to os.sep""" - return s.replace('/', os.sep) - - -class FileListTestCase(support.LoggingSilencer, - unittest.TestCase): - - def assertNoWarnings(self): - self.assertEqual(self.get_logs(WARN), []) - self.clear_logs() - - def assertWarnings(self): - self.assertGreater(len(self.get_logs(WARN)), 0) - self.clear_logs() - - def test_glob_to_re(self): - sep = os.sep - if os.sep == '\\': - sep = re.escape(os.sep) - - for glob, regex in ( - # simple cases - ('foo*', r'(?s:foo[^%(sep)s]*)\Z'), - ('foo?', r'(?s:foo[^%(sep)s])\Z'), - ('foo??', 
r'(?s:foo[^%(sep)s][^%(sep)s])\Z'), - # special cases - (r'foo\\*', r'(?s:foo\\\\[^%(sep)s]*)\Z'), - (r'foo\\\*', r'(?s:foo\\\\\\[^%(sep)s]*)\Z'), - ('foo????', r'(?s:foo[^%(sep)s][^%(sep)s][^%(sep)s][^%(sep)s])\Z'), - (r'foo\\??', r'(?s:foo\\\\[^%(sep)s][^%(sep)s])\Z')): - regex = regex % {'sep': sep} - self.assertEqual(glob_to_re(glob), regex) - - def test_process_template_line(self): - # testing all MANIFEST.in template patterns - file_list = FileList() - l = make_local_path - - # simulated file list - file_list.allfiles = ['foo.tmp', 'ok', 'xo', 'four.txt', - 'buildout.cfg', - # filelist does not filter out VCS directories, - # it's sdist that does - l('.hg/last-message.txt'), - l('global/one.txt'), - l('global/two.txt'), - l('global/files.x'), - l('global/here.tmp'), - l('f/o/f.oo'), - l('dir/graft-one'), - l('dir/dir2/graft2'), - l('dir3/ok'), - l('dir3/sub/ok.txt'), - ] - - for line in MANIFEST_IN.split('\n'): - if line.strip() == '': - continue - file_list.process_template_line(line) - - wanted = ['ok', - 'buildout.cfg', - 'four.txt', - l('.hg/last-message.txt'), - l('global/one.txt'), - l('global/two.txt'), - l('f/o/f.oo'), - l('dir/graft-one'), - l('dir/dir2/graft2'), - ] - - self.assertEqual(file_list.files, wanted) - - def test_debug_print(self): - file_list = FileList() - with captured_stdout() as stdout: - file_list.debug_print('xxx') - self.assertEqual(stdout.getvalue(), '') - - debug.DEBUG = True - try: - with captured_stdout() as stdout: - file_list.debug_print('xxx') - self.assertEqual(stdout.getvalue(), 'xxx\n') - finally: - debug.DEBUG = False - - def test_set_allfiles(self): - file_list = FileList() - files = ['a', 'b', 'c'] - file_list.set_allfiles(files) - self.assertEqual(file_list.allfiles, files) - - def test_remove_duplicates(self): - file_list = FileList() - file_list.files = ['a', 'b', 'a', 'g', 'c', 'g'] - # files must be sorted beforehand (sdist does it) - file_list.sort() - file_list.remove_duplicates() - self.assertEqual(file_list.files, ['a', 'b', 'c', 'g']) - - def test_translate_pattern(self): - # not regex - self.assertTrue(hasattr( - translate_pattern('a', anchor=True, is_regex=False), - 'search')) - - # is a regex - regex = re.compile('a') - self.assertEqual( - translate_pattern(regex, anchor=True, is_regex=True), - regex) - - # plain string flagged as regex - self.assertTrue(hasattr( - translate_pattern('a', anchor=True, is_regex=True), - 'search')) - - # glob support - self.assertTrue(translate_pattern( - '*.py', anchor=True, is_regex=False).search('filelist.py')) - - def test_exclude_pattern(self): - # return False if no match - file_list = FileList() - self.assertFalse(file_list.exclude_pattern('*.py')) - - # return True if files match - file_list = FileList() - file_list.files = ['a.py', 'b.py'] - self.assertTrue(file_list.exclude_pattern('*.py')) - - # test excludes - file_list = FileList() - file_list.files = ['a.py', 'a.txt'] - file_list.exclude_pattern('*.py') - self.assertEqual(file_list.files, ['a.txt']) - - def test_include_pattern(self): - # return False if no match - file_list = FileList() - file_list.set_allfiles([]) - self.assertFalse(file_list.include_pattern('*.py')) - - # return True if files match - file_list = FileList() - file_list.set_allfiles(['a.py', 'b.txt']) - self.assertTrue(file_list.include_pattern('*.py')) - - # test * matches all files - file_list = FileList() - self.assertIsNone(file_list.allfiles) - file_list.set_allfiles(['a.py', 'b.txt']) - file_list.include_pattern('*') - self.assertEqual(file_list.allfiles, 
['a.py', 'b.txt']) - - def test_process_template(self): - l = make_local_path - # invalid lines - file_list = FileList() - for action in ('include', 'exclude', 'global-include', - 'global-exclude', 'recursive-include', - 'recursive-exclude', 'graft', 'prune', 'blarg'): - self.assertRaises(DistutilsTemplateError, - file_list.process_template_line, action) - - # include - file_list = FileList() - file_list.set_allfiles(['a.py', 'b.txt', l('d/c.py')]) - - file_list.process_template_line('include *.py') - self.assertEqual(file_list.files, ['a.py']) - self.assertNoWarnings() - - file_list.process_template_line('include *.rb') - self.assertEqual(file_list.files, ['a.py']) - self.assertWarnings() - - # exclude - file_list = FileList() - file_list.files = ['a.py', 'b.txt', l('d/c.py')] - - file_list.process_template_line('exclude *.py') - self.assertEqual(file_list.files, ['b.txt', l('d/c.py')]) - self.assertNoWarnings() - - file_list.process_template_line('exclude *.rb') - self.assertEqual(file_list.files, ['b.txt', l('d/c.py')]) - self.assertWarnings() - - # global-include - file_list = FileList() - file_list.set_allfiles(['a.py', 'b.txt', l('d/c.py')]) - - file_list.process_template_line('global-include *.py') - self.assertEqual(file_list.files, ['a.py', l('d/c.py')]) - self.assertNoWarnings() - - file_list.process_template_line('global-include *.rb') - self.assertEqual(file_list.files, ['a.py', l('d/c.py')]) - self.assertWarnings() - - # global-exclude - file_list = FileList() - file_list.files = ['a.py', 'b.txt', l('d/c.py')] - - file_list.process_template_line('global-exclude *.py') - self.assertEqual(file_list.files, ['b.txt']) - self.assertNoWarnings() - - file_list.process_template_line('global-exclude *.rb') - self.assertEqual(file_list.files, ['b.txt']) - self.assertWarnings() - - # recursive-include - file_list = FileList() - file_list.set_allfiles(['a.py', l('d/b.py'), l('d/c.txt'), - l('d/d/e.py')]) - - file_list.process_template_line('recursive-include d *.py') - self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')]) - self.assertNoWarnings() - - file_list.process_template_line('recursive-include e *.py') - self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')]) - self.assertWarnings() - - # recursive-exclude - file_list = FileList() - file_list.files = ['a.py', l('d/b.py'), l('d/c.txt'), l('d/d/e.py')] - - file_list.process_template_line('recursive-exclude d *.py') - self.assertEqual(file_list.files, ['a.py', l('d/c.txt')]) - self.assertNoWarnings() - - file_list.process_template_line('recursive-exclude e *.py') - self.assertEqual(file_list.files, ['a.py', l('d/c.txt')]) - self.assertWarnings() - - # graft - file_list = FileList() - file_list.set_allfiles(['a.py', l('d/b.py'), l('d/d/e.py'), - l('f/f.py')]) - - file_list.process_template_line('graft d') - self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')]) - self.assertNoWarnings() - - file_list.process_template_line('graft e') - self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')]) - self.assertWarnings() - - # prune - file_list = FileList() - file_list.files = ['a.py', l('d/b.py'), l('d/d/e.py'), l('f/f.py')] - - file_list.process_template_line('prune d') - self.assertEqual(file_list.files, ['a.py', l('f/f.py')]) - self.assertNoWarnings() - - file_list.process_template_line('prune e') - self.assertEqual(file_list.files, ['a.py', l('f/f.py')]) - self.assertWarnings() - - -class FindAllTestCase(unittest.TestCase): - @os_helper.skip_unless_symlink - def test_missing_symlink(self): - with 
os_helper.temp_cwd(): - os.symlink('foo', 'bar') - self.assertEqual(filelist.findall(), []) - - def test_basic_discovery(self): - """ - When findall is called with no parameters or with - '.' as the parameter, the dot should be omitted from - the results. - """ - with os_helper.temp_cwd(): - os.mkdir('foo') - file1 = os.path.join('foo', 'file1.txt') - os_helper.create_empty_file(file1) - os.mkdir('bar') - file2 = os.path.join('bar', 'file2.txt') - os_helper.create_empty_file(file2) - expected = [file2, file1] - self.assertEqual(sorted(filelist.findall()), expected) - - def test_non_local_discovery(self): - """ - When findall is called with another path, the full - path name should be returned. - """ - with os_helper.temp_dir() as temp_dir: - file1 = os.path.join(temp_dir, 'file1.txt') - os_helper.create_empty_file(file1) - expected = [file1] - self.assertEqual(filelist.findall(temp_dir), expected) - - -def test_suite(): - return unittest.TestSuite([ - unittest.TestLoader().loadTestsFromTestCase(FileListTestCase), - unittest.TestLoader().loadTestsFromTestCase(FindAllTestCase), - ]) - - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_install.py b/Lib/distutils/tests/test_install.py deleted file mode 100644 index c38f98b8b2c294..00000000000000 --- a/Lib/distutils/tests/test_install.py +++ /dev/null @@ -1,261 +0,0 @@ -"""Tests for distutils.command.install.""" - -import os -import sys -import unittest -import site - -from test.support import captured_stdout, run_unittest, requires_subprocess - -from distutils import sysconfig -from distutils.command.install import install, HAS_USER_SITE -from distutils.command import install as install_module -from distutils.command.build_ext import build_ext -from distutils.command.install import INSTALL_SCHEMES -from distutils.core import Distribution -from distutils.errors import DistutilsOptionError -from distutils.extension import Extension - -from distutils.tests import support -from test import support as test_support - - -def _make_ext_name(modname): - return modname + sysconfig.get_config_var('EXT_SUFFIX') - - -class InstallTestCase(support.TempdirManager, - support.EnvironGuard, - support.LoggingSilencer, - unittest.TestCase): - - def setUp(self): - super().setUp() - self._backup_config_vars = dict(sysconfig._config_vars) - - def tearDown(self): - super().tearDown() - sysconfig._config_vars.clear() - sysconfig._config_vars.update(self._backup_config_vars) - - def test_home_installation_scheme(self): - # This ensures two things: - # - that --home generates the desired set of directory names - # - that --home is supported on all platforms - builddir = self.mkdtemp() - destination = os.path.join(builddir, "installation") - - dist = Distribution({"name": "foopkg"}) - # script_name need not exist, it just needs to be initialized - dist.script_name = os.path.join(builddir, "setup.py") - dist.command_obj["build"] = support.DummyCommand( - build_base=builddir, - build_lib=os.path.join(builddir, "lib"), - ) - - cmd = install(dist) - cmd.home = destination - cmd.ensure_finalized() - - self.assertEqual(cmd.install_base, destination) - self.assertEqual(cmd.install_platbase, destination) - - def check_path(got, expected): - got = os.path.normpath(got) - expected = os.path.normpath(expected) - self.assertEqual(got, expected) - - libdir = os.path.join(destination, "lib", "python") - check_path(cmd.install_lib, libdir) - platlibdir = os.path.join(destination, sys.platlibdir, "python") - check_path(cmd.install_platlib, 
platlibdir) - check_path(cmd.install_purelib, libdir) - check_path(cmd.install_headers, - os.path.join(destination, "include", "python", "foopkg")) - check_path(cmd.install_scripts, os.path.join(destination, "bin")) - check_path(cmd.install_data, destination) - - @unittest.skipUnless(HAS_USER_SITE, 'need user site') - def test_user_site(self): - # test install with --user - # preparing the environment for the test - self.old_user_base = site.USER_BASE - self.old_user_site = site.USER_SITE - self.tmpdir = self.mkdtemp() - self.user_base = os.path.join(self.tmpdir, 'B') - self.user_site = os.path.join(self.tmpdir, 'S') - site.USER_BASE = self.user_base - site.USER_SITE = self.user_site - install_module.USER_BASE = self.user_base - install_module.USER_SITE = self.user_site - - def _expanduser(path): - return self.tmpdir - self.old_expand = os.path.expanduser - os.path.expanduser = _expanduser - - def cleanup(): - site.USER_BASE = self.old_user_base - site.USER_SITE = self.old_user_site - install_module.USER_BASE = self.old_user_base - install_module.USER_SITE = self.old_user_site - os.path.expanduser = self.old_expand - - self.addCleanup(cleanup) - - if HAS_USER_SITE: - for key in ('nt_user', 'unix_user'): - self.assertIn(key, INSTALL_SCHEMES) - - dist = Distribution({'name': 'xx'}) - cmd = install(dist) - - # making sure the user option is there - options = [name for name, short, lable in - cmd.user_options] - self.assertIn('user', options) - - # setting a value - cmd.user = 1 - - # user base and site shouldn't be created yet - self.assertFalse(os.path.exists(self.user_base)) - self.assertFalse(os.path.exists(self.user_site)) - - # let's run finalize - cmd.ensure_finalized() - - # now they should - self.assertTrue(os.path.exists(self.user_base)) - self.assertTrue(os.path.exists(self.user_site)) - - self.assertIn('userbase', cmd.config_vars) - self.assertIn('usersite', cmd.config_vars) - - def test_handle_extra_path(self): - dist = Distribution({'name': 'xx', 'extra_path': 'path,dirs'}) - cmd = install(dist) - - # two elements - cmd.handle_extra_path() - self.assertEqual(cmd.extra_path, ['path', 'dirs']) - self.assertEqual(cmd.extra_dirs, 'dirs') - self.assertEqual(cmd.path_file, 'path') - - # one element - cmd.extra_path = ['path'] - cmd.handle_extra_path() - self.assertEqual(cmd.extra_path, ['path']) - self.assertEqual(cmd.extra_dirs, 'path') - self.assertEqual(cmd.path_file, 'path') - - # none - dist.extra_path = cmd.extra_path = None - cmd.handle_extra_path() - self.assertEqual(cmd.extra_path, None) - self.assertEqual(cmd.extra_dirs, '') - self.assertEqual(cmd.path_file, None) - - # three elements (no way !) 
- cmd.extra_path = 'path,dirs,again' - self.assertRaises(DistutilsOptionError, cmd.handle_extra_path) - - def test_finalize_options(self): - dist = Distribution({'name': 'xx'}) - cmd = install(dist) - - # must supply either prefix/exec-prefix/home or - # install-base/install-platbase -- not both - cmd.prefix = 'prefix' - cmd.install_base = 'base' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) - - # must supply either home or prefix/exec-prefix -- not both - cmd.install_base = None - cmd.home = 'home' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) - - # can't combine user with prefix/exec_prefix/home or - # install_(plat)base - cmd.prefix = None - cmd.user = 'user' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) - - def test_record(self): - install_dir = self.mkdtemp() - project_dir, dist = self.create_dist(py_modules=['hello'], - scripts=['sayhi']) - os.chdir(project_dir) - self.write_file('hello.py', "def main(): print('o hai')") - self.write_file('sayhi', 'from hello import main; main()') - - cmd = install(dist) - dist.command_obj['install'] = cmd - cmd.root = install_dir - cmd.record = os.path.join(project_dir, 'filelist') - cmd.ensure_finalized() - cmd.run() - - f = open(cmd.record) - try: - content = f.read() - finally: - f.close() - - found = [os.path.basename(line) for line in content.splitlines()] - expected = ['hello.py', 'hello.%s.pyc' % sys.implementation.cache_tag, - 'sayhi', - 'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2]] - self.assertEqual(found, expected) - - @requires_subprocess() - def test_record_extensions(self): - cmd = test_support.missing_compiler_executable() - if cmd is not None: - self.skipTest('The %r command is not found' % cmd) - install_dir = self.mkdtemp() - project_dir, dist = self.create_dist(ext_modules=[ - Extension('xx', ['xxmodule.c'])]) - os.chdir(project_dir) - support.copy_xxmodule_c(project_dir) - - buildextcmd = build_ext(dist) - support.fixup_build_ext(buildextcmd) - buildextcmd.ensure_finalized() - - cmd = install(dist) - dist.command_obj['install'] = cmd - dist.command_obj['build_ext'] = buildextcmd - cmd.root = install_dir - cmd.record = os.path.join(project_dir, 'filelist') - cmd.ensure_finalized() - cmd.run() - - f = open(cmd.record) - try: - content = f.read() - finally: - f.close() - - found = [os.path.basename(line) for line in content.splitlines()] - expected = [_make_ext_name('xx'), - 'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2]] - self.assertEqual(found, expected) - - def test_debug_mode(self): - # this covers the code called when DEBUG is set - old_logs_len = len(self.logs) - install_module.DEBUG = True - try: - with captured_stdout(): - self.test_record() - finally: - install_module.DEBUG = False - self.assertGreater(len(self.logs), old_logs_len) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(InstallTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_install_data.py b/Lib/distutils/tests/test_install_data.py deleted file mode 100644 index 6191d2fa6eefab..00000000000000 --- a/Lib/distutils/tests/test_install_data.py +++ /dev/null @@ -1,75 +0,0 @@ -"""Tests for distutils.command.install_data.""" -import os -import unittest - -from distutils.command.install_data import install_data -from distutils.tests import support -from test.support import run_unittest - -class InstallDataTestCase(support.TempdirManager, - support.LoggingSilencer, - support.EnvironGuard, - unittest.TestCase): - - 
def test_simple_run(self): - pkg_dir, dist = self.create_dist() - cmd = install_data(dist) - cmd.install_dir = inst = os.path.join(pkg_dir, 'inst') - - # data_files can contain - # - simple files - # - a tuple with a path, and a list of files - one = os.path.join(pkg_dir, 'one') - self.write_file(one, 'xxx') - inst2 = os.path.join(pkg_dir, 'inst2') - two = os.path.join(pkg_dir, 'two') - self.write_file(two, 'xxx') - - cmd.data_files = [one, (inst2, [two])] - self.assertEqual(cmd.get_inputs(), [one, (inst2, [two])]) - - # let's run the command - cmd.ensure_finalized() - cmd.run() - - # let's check the result - self.assertEqual(len(cmd.get_outputs()), 2) - rtwo = os.path.split(two)[-1] - self.assertTrue(os.path.exists(os.path.join(inst2, rtwo))) - rone = os.path.split(one)[-1] - self.assertTrue(os.path.exists(os.path.join(inst, rone))) - cmd.outfiles = [] - - # let's try with warn_dir set to 1 - cmd.warn_dir = 1 - cmd.ensure_finalized() - cmd.run() - - # let's check the result - self.assertEqual(len(cmd.get_outputs()), 2) - self.assertTrue(os.path.exists(os.path.join(inst2, rtwo))) - self.assertTrue(os.path.exists(os.path.join(inst, rone))) - cmd.outfiles = [] - - # now using root and empty dir - cmd.root = os.path.join(pkg_dir, 'root') - inst3 = os.path.join(cmd.install_dir, 'inst3') - inst4 = os.path.join(pkg_dir, 'inst4') - three = os.path.join(cmd.install_dir, 'three') - self.write_file(three, 'xx') - cmd.data_files = [one, (inst2, [two]), - ('inst3', [three]), - (inst4, [])] - cmd.ensure_finalized() - cmd.run() - - # let's check the result - self.assertEqual(len(cmd.get_outputs()), 4) - self.assertTrue(os.path.exists(os.path.join(inst2, rtwo))) - self.assertTrue(os.path.exists(os.path.join(inst, rone))) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(InstallDataTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_install_headers.py b/Lib/distutils/tests/test_install_headers.py deleted file mode 100644 index 1aa4d09cdef731..00000000000000 --- a/Lib/distutils/tests/test_install_headers.py +++ /dev/null @@ -1,39 +0,0 @@ -"""Tests for distutils.command.install_headers.""" -import os -import unittest - -from distutils.command.install_headers import install_headers -from distutils.tests import support -from test.support import run_unittest - -class InstallHeadersTestCase(support.TempdirManager, - support.LoggingSilencer, - support.EnvironGuard, - unittest.TestCase): - - def test_simple_run(self): - # we have two headers - header_list = self.mkdtemp() - header1 = os.path.join(header_list, 'header1') - header2 = os.path.join(header_list, 'header2') - self.write_file(header1) - self.write_file(header2) - headers = [header1, header2] - - pkg_dir, dist = self.create_dist(headers=headers) - cmd = install_headers(dist) - self.assertEqual(cmd.get_inputs(), headers) - - # let's run the command - cmd.install_dir = os.path.join(pkg_dir, 'inst') - cmd.ensure_finalized() - cmd.run() - - # let's check the results - self.assertEqual(len(cmd.get_outputs()), 2) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(InstallHeadersTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_install_lib.py b/Lib/distutils/tests/test_install_lib.py deleted file mode 100644 index f840d1a94665ed..00000000000000 --- a/Lib/distutils/tests/test_install_lib.py +++ /dev/null @@ -1,117 +0,0 @@ -"""Tests for distutils.command.install_lib.""" -import sys -import os -import
importlib.util -import unittest - -from distutils.command.install_lib import install_lib -from distutils.extension import Extension -from distutils.tests import support -from distutils.errors import DistutilsOptionError -from test.support import run_unittest, requires_subprocess - - -class InstallLibTestCase(support.TempdirManager, - support.LoggingSilencer, - support.EnvironGuard, - unittest.TestCase): - - def test_finalize_options(self): - dist = self.create_dist()[1] - cmd = install_lib(dist) - - cmd.finalize_options() - self.assertEqual(cmd.compile, 1) - self.assertEqual(cmd.optimize, 0) - - # optimize must be 0, 1, or 2 - cmd.optimize = 'foo' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) - cmd.optimize = '4' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) - - cmd.optimize = '2' - cmd.finalize_options() - self.assertEqual(cmd.optimize, 2) - - @unittest.skipIf(sys.dont_write_bytecode, 'byte-compile disabled') - @requires_subprocess() - def test_byte_compile(self): - project_dir, dist = self.create_dist() - os.chdir(project_dir) - cmd = install_lib(dist) - cmd.compile = cmd.optimize = 1 - - f = os.path.join(project_dir, 'foo.py') - self.write_file(f, '# python file') - cmd.byte_compile([f]) - pyc_file = importlib.util.cache_from_source('foo.py', optimization='') - pyc_opt_file = importlib.util.cache_from_source('foo.py', - optimization=cmd.optimize) - self.assertTrue(os.path.exists(pyc_file)) - self.assertTrue(os.path.exists(pyc_opt_file)) - - def test_get_outputs(self): - project_dir, dist = self.create_dist() - os.chdir(project_dir) - os.mkdir('spam') - cmd = install_lib(dist) - - # setting up a dist environment - cmd.compile = cmd.optimize = 1 - cmd.install_dir = self.mkdtemp() - f = os.path.join(project_dir, 'spam', '__init__.py') - self.write_file(f, '# python package') - cmd.distribution.ext_modules = [Extension('foo', ['xxx'])] - cmd.distribution.packages = ['spam'] - cmd.distribution.script_name = 'setup.py' - - # get_outputs should return 4 elements: spam/__init__.py and .pyc, - # foo.import-tag-abiflags.so / foo.pyd - outputs = cmd.get_outputs() - self.assertEqual(len(outputs), 4, outputs) - - def test_get_inputs(self): - project_dir, dist = self.create_dist() - os.chdir(project_dir) - os.mkdir('spam') - cmd = install_lib(dist) - - # setting up a dist environment - cmd.compile = cmd.optimize = 1 - cmd.install_dir = self.mkdtemp() - f = os.path.join(project_dir, 'spam', '__init__.py') - self.write_file(f, '# python package') - cmd.distribution.ext_modules = [Extension('foo', ['xxx'])] - cmd.distribution.packages = ['spam'] - cmd.distribution.script_name = 'setup.py' - - # get_inputs should return 2 elements: spam/__init__.py and - # foo.import-tag-abiflags.so / foo.pyd - inputs = cmd.get_inputs() - self.assertEqual(len(inputs), 2, inputs) - - @requires_subprocess() - def test_dont_write_bytecode(self): - # makes sure byte_compile is not used - dist = self.create_dist()[1] - cmd = install_lib(dist) - cmd.compile = 1 - cmd.optimize = 1 - - old_dont_write_bytecode = sys.dont_write_bytecode - sys.dont_write_bytecode = True - try: - cmd.byte_compile([]) - finally: - sys.dont_write_bytecode = old_dont_write_bytecode - - self.assertIn('byte-compiling is disabled', - self.logs[0][1] % self.logs[0][2]) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(InstallLibTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_install_scripts.py 
b/Lib/distutils/tests/test_install_scripts.py deleted file mode 100644 index 648db3b11da745..00000000000000 --- a/Lib/distutils/tests/test_install_scripts.py +++ /dev/null @@ -1,82 +0,0 @@ -"""Tests for distutils.command.install_scripts.""" - -import os -import unittest - -from distutils.command.install_scripts import install_scripts -from distutils.core import Distribution - -from distutils.tests import support -from test.support import run_unittest - - -class InstallScriptsTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - - def test_default_settings(self): - dist = Distribution() - dist.command_obj["build"] = support.DummyCommand( - build_scripts="/foo/bar") - dist.command_obj["install"] = support.DummyCommand( - install_scripts="/splat/funk", - force=1, - skip_build=1, - ) - cmd = install_scripts(dist) - self.assertFalse(cmd.force) - self.assertFalse(cmd.skip_build) - self.assertIsNone(cmd.build_dir) - self.assertIsNone(cmd.install_dir) - - cmd.finalize_options() - - self.assertTrue(cmd.force) - self.assertTrue(cmd.skip_build) - self.assertEqual(cmd.build_dir, "/foo/bar") - self.assertEqual(cmd.install_dir, "/splat/funk") - - def test_installation(self): - source = self.mkdtemp() - expected = [] - - def write_script(name, text): - expected.append(name) - f = open(os.path.join(source, name), "w") - try: - f.write(text) - finally: - f.close() - - write_script("script1.py", ("#! /usr/bin/env python2.3\n" - "# bogus script w/ Python sh-bang\n" - "pass\n")) - write_script("script2.py", ("#!/usr/bin/python\n" - "# bogus script w/ Python sh-bang\n" - "pass\n")) - write_script("shell.sh", ("#!/bin/sh\n" - "# bogus shell script w/ sh-bang\n" - "exit 0\n")) - - target = self.mkdtemp() - dist = Distribution() - dist.command_obj["build"] = support.DummyCommand(build_scripts=source) - dist.command_obj["install"] = support.DummyCommand( - install_scripts=target, - force=1, - skip_build=1, - ) - cmd = install_scripts(dist) - cmd.finalize_options() - cmd.run() - - installed = os.listdir(target) - for name in expected: - self.assertIn(name, installed) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(InstallScriptsTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_log.py b/Lib/distutils/tests/test_log.py deleted file mode 100644 index ec2ae028de8777..00000000000000 --- a/Lib/distutils/tests/test_log.py +++ /dev/null @@ -1,46 +0,0 @@ -"""Tests for distutils.log""" - -import io -import sys -import unittest -from test.support import swap_attr, run_unittest - -from distutils import log - -class TestLog(unittest.TestCase): - def test_non_ascii(self): - # Issues #8663, #34421: test that non-encodable text is escaped with - # backslashreplace error handler and encodable non-ASCII text is - # output as is. 
- for errors in ('strict', 'backslashreplace', 'surrogateescape', - 'replace', 'ignore'): - with self.subTest(errors=errors): - stdout = io.TextIOWrapper(io.BytesIO(), - encoding='cp437', errors=errors) - stderr = io.TextIOWrapper(io.BytesIO(), - encoding='cp437', errors=errors) - old_threshold = log.set_threshold(log.DEBUG) - try: - with swap_attr(sys, 'stdout', stdout), \ - swap_attr(sys, 'stderr', stderr): - log.debug('Dεbug\tMėssãge') - log.fatal('Fαtal\tÈrrōr') - finally: - log.set_threshold(old_threshold) - - stdout.seek(0) - self.assertEqual(stdout.read().rstrip(), - 'Dεbug\tM?ss?ge' if errors == 'replace' else - 'Dεbug\tMssge' if errors == 'ignore' else - 'Dεbug\tM\\u0117ss\\xe3ge') - stderr.seek(0) - self.assertEqual(stderr.read().rstrip(), - 'Fαtal\t?rr?r' if errors == 'replace' else - 'Fαtal\trrr' if errors == 'ignore' else - 'Fαtal\t\\xc8rr\\u014dr') - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(TestLog) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_msvc9compiler.py b/Lib/distutils/tests/test_msvc9compiler.py deleted file mode 100644 index 6235405e31201b..00000000000000 --- a/Lib/distutils/tests/test_msvc9compiler.py +++ /dev/null @@ -1,184 +0,0 @@ -"""Tests for distutils.msvc9compiler.""" -import sys -import unittest -import os - -from distutils.errors import DistutilsPlatformError -from distutils.tests import support -from test.support import run_unittest - -# A manifest with the only assembly reference being the msvcrt assembly, so -# should have the assembly completely stripped. Note that although the -# assembly has a <security> reference the assembly is removed - that is -# currently a "feature", not a bug :) -_MANIFEST_WITH_ONLY_MSVC_REFERENCE = """\ -<?xml version="1.0" encoding="UTF-8" standalone="yes"?> -<assembly xmlns="urn:schemas-microsoft-com:asm.v1" - manifestVersion="1.0"> - <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"> - <security> - <requestedPrivileges> - <requestedExecutionLevel level="asInvoker" uiAccess="false"> - </requestedExecutionLevel> - </requestedPrivileges> - </security> - </trustInfo> - <dependency> - <dependentAssembly> - <assemblyIdentity type="win32" name="Microsoft.VC90.CRT" - version="9.0.21022.8" processorArchitecture="x86" - publicKeyToken="XXXX"> - </assemblyIdentity> - </dependentAssembly> - </dependency> -</assembly> -""" - -# A manifest with references to assemblies other than msvcrt. When processed, -# this assembly should be returned with just the msvcrt part removed. 
-_MANIFEST_WITH_MULTIPLE_REFERENCES = """\ -<?xml version="1.0" encoding="UTF-8" standalone="yes"?> -<assembly xmlns="urn:schemas-microsoft-com:asm.v1" - manifestVersion="1.0"> - <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"> - <security> - <requestedPrivileges> - <requestedExecutionLevel level="asInvoker" uiAccess="false"> - </requestedExecutionLevel> - </requestedPrivileges> - </security> - </trustInfo> - <dependency> - <dependentAssembly> - <assemblyIdentity type="win32" name="Microsoft.VC90.CRT" - version="9.0.21022.8" processorArchitecture="x86" - publicKeyToken="XXXX"> - </assemblyIdentity> - </dependentAssembly> - </dependency> - <dependency> - <dependentAssembly> - <assemblyIdentity type="win32" name="Microsoft.VC90.MFC" - version="9.0.21022.8" processorArchitecture="x86" - publicKeyToken="XXXX"></assemblyIdentity> - </dependentAssembly> - </dependency> -</assembly> -""" - -_CLEANED_MANIFEST = """\ -<?xml version="1.0" encoding="UTF-8" standalone="yes"?> -<assembly xmlns="urn:schemas-microsoft-com:asm.v1" - manifestVersion="1.0"> - <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"> - <security> - <requestedPrivileges> - <requestedExecutionLevel level="asInvoker" uiAccess="false"> - </requestedExecutionLevel> - </requestedPrivileges> - </security> - </trustInfo> - <dependency> - - </dependency> - <dependency> - <dependentAssembly> - <assemblyIdentity type="win32" name="Microsoft.VC90.MFC" - version="9.0.21022.8" processorArchitecture="x86" - publicKeyToken="XXXX"></assemblyIdentity> - </dependentAssembly> - </dependency> -</assembly>""" - -if sys.platform=="win32": - from distutils.msvccompiler import get_build_version - if get_build_version()>=8.0: - SKIP_MESSAGE = None - else: - SKIP_MESSAGE = "These tests are only for MSVC8.0 or above" -else: - SKIP_MESSAGE = "These tests are only for win32" - -@unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE) -class msvc9compilerTestCase(support.TempdirManager, - unittest.TestCase): - - def test_no_compiler(self): - # makes sure query_vcvarsall raises - # a DistutilsPlatformError if the compiler - # is not found - from distutils.msvc9compiler import query_vcvarsall - def _find_vcvarsall(version): - return None - - from distutils import msvc9compiler - old_find_vcvarsall = msvc9compiler.find_vcvarsall - msvc9compiler.find_vcvarsall = _find_vcvarsall - try: - self.assertRaises(DistutilsPlatformError, query_vcvarsall, - 'wont find this version') - finally: - msvc9compiler.find_vcvarsall = old_find_vcvarsall - - def test_reg_class(self): - from distutils.msvc9compiler import Reg - self.assertRaises(KeyError, Reg.get_value, 'xxx', 'xxx') - - # looking for values that should exist on all - # windows registry versions. 
- path = r'Control Panel\Desktop' - v = Reg.get_value(path, 'dragfullwindows') - self.assertIn(v, ('0', '1', '2')) - - import winreg - HKCU = winreg.HKEY_CURRENT_USER - keys = Reg.read_keys(HKCU, 'xxxx') - self.assertEqual(keys, None) - - keys = Reg.read_keys(HKCU, r'Control Panel') - self.assertIn('Desktop', keys) - - def test_remove_visual_c_ref(self): - from distutils.msvc9compiler import MSVCCompiler - tempdir = self.mkdtemp() - manifest = os.path.join(tempdir, 'manifest') - f = open(manifest, 'w') - try: - f.write(_MANIFEST_WITH_MULTIPLE_REFERENCES) - finally: - f.close() - - compiler = MSVCCompiler() - compiler._remove_visual_c_ref(manifest) - - # see what we got - f = open(manifest) - try: - # removing trailing spaces - content = '\n'.join([line.rstrip() for line in f.readlines()]) - finally: - f.close() - - # makes sure the manifest was properly cleaned - self.assertEqual(content, _CLEANED_MANIFEST) - - def test_remove_entire_manifest(self): - from distutils.msvc9compiler import MSVCCompiler - tempdir = self.mkdtemp() - manifest = os.path.join(tempdir, 'manifest') - f = open(manifest, 'w') - try: - f.write(_MANIFEST_WITH_ONLY_MSVC_REFERENCE) - finally: - f.close() - - compiler = MSVCCompiler() - got = compiler._remove_visual_c_ref(manifest) - self.assertIsNone(got) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(msvc9compilerTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_msvccompiler.py b/Lib/distutils/tests/test_msvccompiler.py deleted file mode 100644 index dd67c3eb6d519d..00000000000000 --- a/Lib/distutils/tests/test_msvccompiler.py +++ /dev/null @@ -1,81 +0,0 @@ -"""Tests for distutils._msvccompiler.""" -import sys -import unittest -import os - -from distutils.errors import DistutilsPlatformError -from distutils.tests import support -from test.support import run_unittest - - -SKIP_MESSAGE = (None if sys.platform == "win32" else - "These tests are only for win32") - -@unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE) -class msvccompilerTestCase(support.TempdirManager, - unittest.TestCase): - - def test_no_compiler(self): - import distutils._msvccompiler as _msvccompiler - # makes sure query_vcvarsall raises - # a DistutilsPlatformError if the compiler - # is not found - def _find_vcvarsall(plat_spec): - return None, None - - old_find_vcvarsall = _msvccompiler._find_vcvarsall - _msvccompiler._find_vcvarsall = _find_vcvarsall - try: - self.assertRaises(DistutilsPlatformError, - _msvccompiler._get_vc_env, - 'wont find this version') - finally: - _msvccompiler._find_vcvarsall = old_find_vcvarsall - - def test_get_vc_env_unicode(self): - import distutils._msvccompiler as _msvccompiler - - test_var = 'ṰḖṤṪ┅ṼẨṜ' - test_value = '₃⁴₅' - - # Ensure we don't early exit from _get_vc_env - old_distutils_use_sdk = os.environ.pop('DISTUTILS_USE_SDK', None) - os.environ[test_var] = test_value - try: - env = _msvccompiler._get_vc_env('x86') - self.assertIn(test_var.lower(), env) - self.assertEqual(test_value, env[test_var.lower()]) - finally: - os.environ.pop(test_var) - if old_distutils_use_sdk: - os.environ['DISTUTILS_USE_SDK'] = old_distutils_use_sdk - - def test_get_vc2017(self): - import distutils._msvccompiler as _msvccompiler - - # This function cannot be mocked, so pass it if we find VS 2017 - # and mark it skipped if we do not. 
- version, path = _msvccompiler._find_vc2017() - if version: - self.assertGreaterEqual(version, 15) - self.assertTrue(os.path.isdir(path)) - else: - raise unittest.SkipTest("VS 2017 is not installed") - - def test_get_vc2015(self): - import distutils._msvccompiler as _msvccompiler - - # This function cannot be mocked, so pass it if we find VS 2015 - # and mark it skipped if we do not. - version, path = _msvccompiler._find_vc2015() - if version: - self.assertGreaterEqual(version, 14) - self.assertTrue(os.path.isdir(path)) - else: - raise unittest.SkipTest("VS 2015 is not installed") - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(msvccompilerTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_register.py b/Lib/distutils/tests/test_register.py deleted file mode 100644 index 7805c6d3c9f7b9..00000000000000 --- a/Lib/distutils/tests/test_register.py +++ /dev/null @@ -1,324 +0,0 @@ -"""Tests for distutils.command.register.""" -import os -import unittest -import getpass -import urllib -import warnings - -from test.support import run_unittest -from test.support.warnings_helper import check_warnings - -from distutils.command import register as register_module -from distutils.command.register import register -from distutils.errors import DistutilsSetupError -from distutils.log import INFO - -from distutils.tests.test_config import BasePyPIRCCommandTestCase - -try: - import docutils -except ImportError: - docutils = None - -PYPIRC_NOPASSWORD = """\ -[distutils] - -index-servers = - server1 - -[server1] -username:me -""" - -WANTED_PYPIRC = """\ -[distutils] -index-servers = - pypi - -[pypi] -username:tarek -password:password -""" - -class Inputs(object): - """Fakes user inputs.""" - def __init__(self, *answers): - self.answers = answers - self.index = 0 - - def __call__(self, prompt=''): - try: - return self.answers[self.index] - finally: - self.index += 1 - -class FakeOpener(object): - """Fakes a PyPI server""" - def __init__(self): - self.reqs = [] - - def __call__(self, *args): - return self - - def open(self, req, data=None, timeout=None): - self.reqs.append(req) - return self - - def read(self): - return b'xxx' - - def getheader(self, name, default=None): - return { - 'content-type': 'text/plain; charset=utf-8', - }.get(name.lower(), default) - - -class RegisterTestCase(BasePyPIRCCommandTestCase): - - def setUp(self): - super(RegisterTestCase, self).setUp() - # patching the password prompt - self._old_getpass = getpass.getpass - def _getpass(prompt): - return 'password' - getpass.getpass = _getpass - urllib.request._opener = None - self.old_opener = urllib.request.build_opener - self.conn = urllib.request.build_opener = FakeOpener() - - def tearDown(self): - getpass.getpass = self._old_getpass - urllib.request._opener = None - urllib.request.build_opener = self.old_opener - super(RegisterTestCase, self).tearDown() - - def _get_cmd(self, metadata=None): - if metadata is None: - metadata = {'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx', - 'name': 'xxx', 'version': 'xxx'} - pkg_info, dist = self.create_dist(**metadata) - return register(dist) - - def test_create_pypirc(self): - # this test makes sure a .pypirc file - # is created when requested. 
- - # let's create a register instance - cmd = self._get_cmd() - - # we shouldn't have a .pypirc file yet - self.assertFalse(os.path.exists(self.rc)) - - # patching input and getpass.getpass - # so register gets happy - # - # Here's what we are faking : - # use your existing login (choice 1.) - # Username : 'tarek' - # Password : 'password' - # Save your login (y/N)? : 'y' - inputs = Inputs('1', 'tarek', 'y') - register_module.input = inputs.__call__ - # let's run the command - try: - cmd.run() - finally: - del register_module.input - - # we should have a brand new .pypirc file - self.assertTrue(os.path.exists(self.rc)) - - # with the content similar to WANTED_PYPIRC - f = open(self.rc) - try: - content = f.read() - self.assertEqual(content, WANTED_PYPIRC) - finally: - f.close() - - # now let's make sure the .pypirc file generated - # really works : we shouldn't be asked anything - # if we run the command again - def _no_way(prompt=''): - raise AssertionError(prompt) - register_module.input = _no_way - - cmd.show_response = 1 - cmd.run() - - # let's see what the server received : we should - # have 2 similar requests - self.assertEqual(len(self.conn.reqs), 2) - req1 = dict(self.conn.reqs[0].headers) - req2 = dict(self.conn.reqs[1].headers) - - self.assertEqual(req1['Content-length'], '1374') - self.assertEqual(req2['Content-length'], '1374') - self.assertIn(b'xxx', self.conn.reqs[1].data) - - def test_password_not_in_file(self): - - self.write_file(self.rc, PYPIRC_NOPASSWORD) - cmd = self._get_cmd() - cmd._set_config() - cmd.finalize_options() - cmd.send_metadata() - - # dist.password should be set - # therefore used afterwards by other commands - self.assertEqual(cmd.distribution.password, 'password') - - def test_registering(self): - # this test runs choice 2 - cmd = self._get_cmd() - inputs = Inputs('2', 'tarek', 'tarek@ziade.org') - register_module.input = inputs.__call__ - try: - # let's run the command - cmd.run() - finally: - del register_module.input - - # we should have send a request - self.assertEqual(len(self.conn.reqs), 1) - req = self.conn.reqs[0] - headers = dict(req.headers) - self.assertEqual(headers['Content-length'], '608') - self.assertIn(b'tarek', req.data) - - def test_password_reset(self): - # this test runs choice 3 - cmd = self._get_cmd() - inputs = Inputs('3', 'tarek@ziade.org') - register_module.input = inputs.__call__ - try: - # let's run the command - cmd.run() - finally: - del register_module.input - - # we should have send a request - self.assertEqual(len(self.conn.reqs), 1) - req = self.conn.reqs[0] - headers = dict(req.headers) - self.assertEqual(headers['Content-length'], '290') - self.assertIn(b'tarek', req.data) - - @unittest.skipUnless(docutils is not None, 'needs docutils') - def test_strict(self): - # testing the script option - # when on, the register command stops if - # the metadata is incomplete or if - # long_description is not reSt compliant - - # empty metadata - cmd = self._get_cmd({}) - cmd.ensure_finalized() - cmd.strict = 1 - self.assertRaises(DistutilsSetupError, cmd.run) - - # metadata are OK but long_description is broken - metadata = {'url': 'xxx', 'author': 'xxx', - 'author_email': 'éxéxé', - 'name': 'xxx', 'version': 'xxx', - 'long_description': 'title\n==\n\ntext'} - - cmd = self._get_cmd(metadata) - cmd.ensure_finalized() - cmd.strict = 1 - self.assertRaises(DistutilsSetupError, cmd.run) - - # now something that works - metadata['long_description'] = 'title\n=====\n\ntext' - cmd = self._get_cmd(metadata) - cmd.ensure_finalized() - 
cmd.strict = 1 - inputs = Inputs('1', 'tarek', 'y') - register_module.input = inputs.__call__ - # let's run the command - try: - cmd.run() - finally: - del register_module.input - - # strict is not by default - cmd = self._get_cmd() - cmd.ensure_finalized() - inputs = Inputs('1', 'tarek', 'y') - register_module.input = inputs.__call__ - # let's run the command - try: - cmd.run() - finally: - del register_module.input - - # and finally a Unicode test (bug #12114) - metadata = {'url': 'xxx', 'author': '\u00c9ric', - 'author_email': 'xxx', 'name': 'xxx', - 'version': 'xxx', - 'description': 'Something about esszet \u00df', - 'long_description': 'More things about esszet \u00df'} - - cmd = self._get_cmd(metadata) - cmd.ensure_finalized() - cmd.strict = 1 - inputs = Inputs('1', 'tarek', 'y') - register_module.input = inputs.__call__ - # let's run the command - try: - cmd.run() - finally: - del register_module.input - - @unittest.skipUnless(docutils is not None, 'needs docutils') - def test_register_invalid_long_description(self): - description = ':funkie:`str`' # mimic Sphinx-specific markup - metadata = {'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx', - 'name': 'xxx', 'version': 'xxx', - 'long_description': description} - cmd = self._get_cmd(metadata) - cmd.ensure_finalized() - cmd.strict = True - inputs = Inputs('2', 'tarek', 'tarek@ziade.org') - register_module.input = inputs - self.addCleanup(delattr, register_module, 'input') - - self.assertRaises(DistutilsSetupError, cmd.run) - - def test_check_metadata_deprecated(self): - # makes sure make_metadata is deprecated - cmd = self._get_cmd() - with check_warnings() as w: - warnings.simplefilter("always") - cmd.check_metadata() - self.assertEqual(len(w.warnings), 1) - - def test_list_classifiers(self): - cmd = self._get_cmd() - cmd.list_classifiers = 1 - cmd.run() - results = self.get_logs(INFO) - self.assertEqual(results, ['running check', 'xxx']) - - def test_show_response(self): - # test that the --show-response option return a well formatted response - cmd = self._get_cmd() - inputs = Inputs('1', 'tarek', 'y') - register_module.input = inputs.__call__ - cmd.show_response = 1 - try: - cmd.run() - finally: - del register_module.input - - results = self.get_logs(INFO) - self.assertEqual(results[3], 75 * '-' + '\nxxx\n' + 75 * '-') - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(RegisterTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_sdist.py b/Lib/distutils/tests/test_sdist.py deleted file mode 100644 index 46b3a13e470c4e..00000000000000 --- a/Lib/distutils/tests/test_sdist.py +++ /dev/null @@ -1,493 +0,0 @@ -"""Tests for distutils.command.sdist.""" -import os -import tarfile -import unittest -import warnings -import zipfile -from os.path import join -from textwrap import dedent -from test.support import captured_stdout, run_unittest -from test.support.warnings_helper import check_warnings - -try: - import zlib - ZLIB_SUPPORT = True -except ImportError: - ZLIB_SUPPORT = False - -try: - import grp - import pwd - UID_GID_SUPPORT = True -except ImportError: - UID_GID_SUPPORT = False - -from distutils.command.sdist import sdist, show_formats -from distutils.core import Distribution -from distutils.tests.test_config import BasePyPIRCCommandTestCase -from distutils.errors import DistutilsOptionError -from distutils.spawn import find_executable -from distutils.log import WARN -from distutils.filelist import FileList -from distutils.archive_util import 
ARCHIVE_FORMATS - -SETUP_PY = """ -from distutils.core import setup -import somecode - -setup(name='fake') -""" - -MANIFEST = """\ -# file GENERATED by distutils, do NOT edit -README -buildout.cfg -inroot.txt -setup.py -data%(sep)sdata.dt -scripts%(sep)sscript.py -some%(sep)sfile.txt -some%(sep)sother_file.txt -somecode%(sep)s__init__.py -somecode%(sep)sdoc.dat -somecode%(sep)sdoc.txt -""" - -class SDistTestCase(BasePyPIRCCommandTestCase): - - def setUp(self): - # PyPIRCCommandTestCase creates a temp dir already - # and put it in self.tmp_dir - super(SDistTestCase, self).setUp() - # setting up an environment - self.old_path = os.getcwd() - os.mkdir(join(self.tmp_dir, 'somecode')) - os.mkdir(join(self.tmp_dir, 'dist')) - # a package, and a README - self.write_file((self.tmp_dir, 'README'), 'xxx') - self.write_file((self.tmp_dir, 'somecode', '__init__.py'), '#') - self.write_file((self.tmp_dir, 'setup.py'), SETUP_PY) - os.chdir(self.tmp_dir) - - def tearDown(self): - # back to normal - os.chdir(self.old_path) - super(SDistTestCase, self).tearDown() - - def get_cmd(self, metadata=None): - """Returns a cmd""" - if metadata is None: - metadata = {'name': 'fake', 'version': '1.0', - 'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx'} - dist = Distribution(metadata) - dist.script_name = 'setup.py' - dist.packages = ['somecode'] - dist.include_package_data = True - cmd = sdist(dist) - cmd.dist_dir = 'dist' - return dist, cmd - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_prune_file_list(self): - # this test creates a project with some VCS dirs and an NFS rename - # file, then launches sdist to check they get pruned on all systems - - # creating VCS directories with some files in them - os.mkdir(join(self.tmp_dir, 'somecode', '.svn')) - self.write_file((self.tmp_dir, 'somecode', '.svn', 'ok.py'), 'xxx') - - os.mkdir(join(self.tmp_dir, 'somecode', '.hg')) - self.write_file((self.tmp_dir, 'somecode', '.hg', - 'ok'), 'xxx') - - os.mkdir(join(self.tmp_dir, 'somecode', '.git')) - self.write_file((self.tmp_dir, 'somecode', '.git', - 'ok'), 'xxx') - - self.write_file((self.tmp_dir, 'somecode', '.nfs0001'), 'xxx') - - # now building a sdist - dist, cmd = self.get_cmd() - - # zip is available universally - # (tar might not be installed under win32) - cmd.formats = ['zip'] - - cmd.ensure_finalized() - cmd.run() - - # now let's check what we have - dist_folder = join(self.tmp_dir, 'dist') - files = os.listdir(dist_folder) - self.assertEqual(files, ['fake-1.0.zip']) - - zip_file = zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip')) - try: - content = zip_file.namelist() - finally: - zip_file.close() - - # making sure everything has been pruned correctly - expected = ['', 'PKG-INFO', 'README', 'setup.py', - 'somecode/', 'somecode/__init__.py'] - self.assertEqual(sorted(content), ['fake-1.0/' + x for x in expected]) - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - @unittest.skipIf(find_executable('tar') is None, - "The tar command is not found") - @unittest.skipIf(find_executable('gzip') is None, - "The gzip command is not found") - def test_make_distribution(self): - # now building a sdist - dist, cmd = self.get_cmd() - - # creating a gztar then a tar - cmd.formats = ['gztar', 'tar'] - cmd.ensure_finalized() - cmd.run() - - # making sure we have two files - dist_folder = join(self.tmp_dir, 'dist') - result = os.listdir(dist_folder) - result.sort() - self.assertEqual(result, ['fake-1.0.tar', 'fake-1.0.tar.gz']) - - os.remove(join(dist_folder, 'fake-1.0.tar')) 
- os.remove(join(dist_folder, 'fake-1.0.tar.gz')) - - # now trying a tar then a gztar - cmd.formats = ['tar', 'gztar'] - - cmd.ensure_finalized() - cmd.run() - - result = os.listdir(dist_folder) - result.sort() - self.assertEqual(result, ['fake-1.0.tar', 'fake-1.0.tar.gz']) - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_add_defaults(self): - - # http://bugs.python.org/issue2279 - - # add_default should also include - # data_files and package_data - dist, cmd = self.get_cmd() - - # filling data_files by pointing files - # in package_data - dist.package_data = {'': ['*.cfg', '*.dat'], - 'somecode': ['*.txt']} - self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#') - self.write_file((self.tmp_dir, 'somecode', 'doc.dat'), '#') - - # adding some data in data_files - data_dir = join(self.tmp_dir, 'data') - os.mkdir(data_dir) - self.write_file((data_dir, 'data.dt'), '#') - some_dir = join(self.tmp_dir, 'some') - os.mkdir(some_dir) - # make sure VCS directories are pruned (#14004) - hg_dir = join(self.tmp_dir, '.hg') - os.mkdir(hg_dir) - self.write_file((hg_dir, 'last-message.txt'), '#') - # a buggy regex used to prevent this from working on windows (#6884) - self.write_file((self.tmp_dir, 'buildout.cfg'), '#') - self.write_file((self.tmp_dir, 'inroot.txt'), '#') - self.write_file((some_dir, 'file.txt'), '#') - self.write_file((some_dir, 'other_file.txt'), '#') - - dist.data_files = [('data', ['data/data.dt', - 'buildout.cfg', - 'inroot.txt', - 'notexisting']), - 'some/file.txt', - 'some/other_file.txt'] - - # adding a script - script_dir = join(self.tmp_dir, 'scripts') - os.mkdir(script_dir) - self.write_file((script_dir, 'script.py'), '#') - dist.scripts = [join('scripts', 'script.py')] - - cmd.formats = ['zip'] - cmd.use_defaults = True - - cmd.ensure_finalized() - cmd.run() - - # now let's check what we have - dist_folder = join(self.tmp_dir, 'dist') - files = os.listdir(dist_folder) - self.assertEqual(files, ['fake-1.0.zip']) - - zip_file = zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip')) - try: - content = zip_file.namelist() - finally: - zip_file.close() - - # making sure everything was added - expected = ['', 'PKG-INFO', 'README', 'buildout.cfg', - 'data/', 'data/data.dt', 'inroot.txt', - 'scripts/', 'scripts/script.py', 'setup.py', - 'some/', 'some/file.txt', 'some/other_file.txt', - 'somecode/', 'somecode/__init__.py', 'somecode/doc.dat', - 'somecode/doc.txt'] - self.assertEqual(sorted(content), ['fake-1.0/' + x for x in expected]) - - # checking the MANIFEST - f = open(join(self.tmp_dir, 'MANIFEST')) - try: - manifest = f.read() - finally: - f.close() - self.assertEqual(manifest, MANIFEST % {'sep': os.sep}) - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_metadata_check_option(self): - # testing the `medata-check` option - dist, cmd = self.get_cmd(metadata={}) - - # this should raise some warnings ! 
- # with the `check` subcommand - cmd.ensure_finalized() - cmd.run() - warnings = [msg for msg in self.get_logs(WARN) if - msg.startswith('warning: check:')] - self.assertEqual(len(warnings), 2) - - # trying with a complete set of metadata - self.clear_logs() - dist, cmd = self.get_cmd() - cmd.ensure_finalized() - cmd.metadata_check = 0 - cmd.run() - warnings = [msg for msg in self.get_logs(WARN) if - msg.startswith('warning: check:')] - self.assertEqual(len(warnings), 0) - - def test_check_metadata_deprecated(self): - # makes sure make_metadata is deprecated - dist, cmd = self.get_cmd() - with check_warnings() as w: - warnings.simplefilter("always") - cmd.check_metadata() - self.assertEqual(len(w.warnings), 1) - - def test_show_formats(self): - with captured_stdout() as stdout: - show_formats() - - # the output should be a header line + one line per format - num_formats = len(ARCHIVE_FORMATS.keys()) - output = [line for line in stdout.getvalue().split('\n') - if line.strip().startswith('--formats=')] - self.assertEqual(len(output), num_formats) - - def test_finalize_options(self): - dist, cmd = self.get_cmd() - cmd.finalize_options() - - # default options set by finalize - self.assertEqual(cmd.manifest, 'MANIFEST') - self.assertEqual(cmd.template, 'MANIFEST.in') - self.assertEqual(cmd.dist_dir, 'dist') - - # formats has to be a string splitable on (' ', ',') or - # a stringlist - cmd.formats = 1 - self.assertRaises(DistutilsOptionError, cmd.finalize_options) - cmd.formats = ['zip'] - cmd.finalize_options() - - # formats has to be known - cmd.formats = 'supazipa' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) - - # the following tests make sure there is a nice error message instead - # of a traceback when parsing an invalid manifest template - - def _check_template(self, content): - dist, cmd = self.get_cmd() - os.chdir(self.tmp_dir) - self.write_file('MANIFEST.in', content) - cmd.ensure_finalized() - cmd.filelist = FileList() - cmd.read_template() - warnings = self.get_logs(WARN) - self.assertEqual(len(warnings), 1) - - def test_invalid_template_unknown_command(self): - self._check_template('taunt knights *') - - def test_invalid_template_wrong_arguments(self): - # this manifest command takes one argument - self._check_template('prune') - - @unittest.skipIf(os.name != 'nt', 'test relevant for Windows only') - def test_invalid_template_wrong_path(self): - # on Windows, trailing slashes are not allowed - # this used to crash instead of raising a warning: #8286 - self._check_template('include examples/') - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_get_file_list(self): - # make sure MANIFEST is recalculated - dist, cmd = self.get_cmd() - - # filling data_files by pointing files in package_data - dist.package_data = {'somecode': ['*.txt']} - self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#') - cmd.formats = ['gztar'] - cmd.ensure_finalized() - cmd.run() - - f = open(cmd.manifest) - try: - manifest = [line.strip() for line in f.read().split('\n') - if line.strip() != ''] - finally: - f.close() - - self.assertEqual(len(manifest), 5) - - # adding a file - self.write_file((self.tmp_dir, 'somecode', 'doc2.txt'), '#') - - # make sure build_py is reinitialized, like a fresh run - build_py = dist.get_command_obj('build_py') - build_py.finalized = False - build_py.ensure_finalized() - - cmd.run() - - f = open(cmd.manifest) - try: - manifest2 = [line.strip() for line in f.read().split('\n') - if line.strip() != ''] - finally: - f.close() - - # 
do we have the new file in MANIFEST ? - self.assertEqual(len(manifest2), 6) - self.assertIn('doc2.txt', manifest2[-1]) - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_manifest_marker(self): - # check that autogenerated MANIFESTs have a marker - dist, cmd = self.get_cmd() - cmd.ensure_finalized() - cmd.run() - - f = open(cmd.manifest) - try: - manifest = [line.strip() for line in f.read().split('\n') - if line.strip() != ''] - finally: - f.close() - - self.assertEqual(manifest[0], - '# file GENERATED by distutils, do NOT edit') - - @unittest.skipUnless(ZLIB_SUPPORT, "Need zlib support to run") - def test_manifest_comments(self): - # make sure comments don't cause exceptions or wrong includes - contents = dedent("""\ - # bad.py - #bad.py - good.py - """) - dist, cmd = self.get_cmd() - cmd.ensure_finalized() - self.write_file((self.tmp_dir, cmd.manifest), contents) - self.write_file((self.tmp_dir, 'good.py'), '# pick me!') - self.write_file((self.tmp_dir, 'bad.py'), "# don't pick me!") - self.write_file((self.tmp_dir, '#bad.py'), "# don't pick me!") - cmd.run() - self.assertEqual(cmd.filelist.files, ['good.py']) - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_manual_manifest(self): - # check that a MANIFEST without a marker is left alone - dist, cmd = self.get_cmd() - cmd.formats = ['gztar'] - cmd.ensure_finalized() - self.write_file((self.tmp_dir, cmd.manifest), 'README.manual') - self.write_file((self.tmp_dir, 'README.manual'), - 'This project maintains its MANIFEST file itself.') - cmd.run() - self.assertEqual(cmd.filelist.files, ['README.manual']) - - f = open(cmd.manifest) - try: - manifest = [line.strip() for line in f.read().split('\n') - if line.strip() != ''] - finally: - f.close() - - self.assertEqual(manifest, ['README.manual']) - - archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') - archive = tarfile.open(archive_name) - try: - filenames = [tarinfo.name for tarinfo in archive] - finally: - archive.close() - self.assertEqual(sorted(filenames), ['fake-1.0', 'fake-1.0/PKG-INFO', - 'fake-1.0/README.manual']) - - @unittest.skipUnless(ZLIB_SUPPORT, "requires zlib") - @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support") - @unittest.skipIf(find_executable('tar') is None, - "The tar command is not found") - @unittest.skipIf(find_executable('gzip') is None, - "The gzip command is not found") - def test_make_distribution_owner_group(self): - # now building a sdist - dist, cmd = self.get_cmd() - - # creating a gztar and specifying the owner+group - cmd.formats = ['gztar'] - cmd.owner = pwd.getpwuid(0)[0] - cmd.group = grp.getgrgid(0)[0] - cmd.ensure_finalized() - cmd.run() - - # making sure we have the good rights - archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') - archive = tarfile.open(archive_name) - try: - for member in archive.getmembers(): - self.assertEqual(member.uid, 0) - self.assertEqual(member.gid, 0) - finally: - archive.close() - - # building a sdist again - dist, cmd = self.get_cmd() - - # creating a gztar - cmd.formats = ['gztar'] - cmd.ensure_finalized() - cmd.run() - - # making sure we have the good rights - archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') - archive = tarfile.open(archive_name) - - # note that we are not testing the group ownership here - # because, depending on the platforms and the container - # rights (see #7408) - try: - for member in archive.getmembers(): - self.assertEqual(member.uid, os.getuid()) - finally: - archive.close() - -def 
test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(SDistTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_spawn.py b/Lib/distutils/tests/test_spawn.py deleted file mode 100644 index a0a1145da5df8e..00000000000000 --- a/Lib/distutils/tests/test_spawn.py +++ /dev/null @@ -1,139 +0,0 @@ -"""Tests for distutils.spawn.""" -import os -import stat -import sys -import unittest.mock -from test.support import run_unittest, unix_shell, requires_subprocess -from test.support import os_helper - -from distutils.spawn import find_executable -from distutils.spawn import spawn -from distutils.errors import DistutilsExecError -from distutils.tests import support - - -@requires_subprocess() -class SpawnTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - - @unittest.skipUnless(os.name in ('nt', 'posix'), - 'Runs only under posix or nt') - def test_spawn(self): - tmpdir = self.mkdtemp() - - # creating something executable - # through the shell that returns 1 - if sys.platform != 'win32': - exe = os.path.join(tmpdir, 'foo.sh') - self.write_file(exe, '#!%s\nexit 1' % unix_shell) - else: - exe = os.path.join(tmpdir, 'foo.bat') - self.write_file(exe, 'exit 1') - - os.chmod(exe, 0o777) - self.assertRaises(DistutilsExecError, spawn, [exe]) - - # now something that works - if sys.platform != 'win32': - exe = os.path.join(tmpdir, 'foo.sh') - self.write_file(exe, '#!%s\nexit 0' % unix_shell) - else: - exe = os.path.join(tmpdir, 'foo.bat') - self.write_file(exe, 'exit 0') - - os.chmod(exe, 0o777) - spawn([exe]) # should work without any error - - def test_find_executable(self): - with os_helper.temp_dir() as tmp_dir: - # use TESTFN to get a pseudo-unique filename - program_noeext = os_helper.TESTFN - # Give the temporary program an ".exe" suffix for all. - # It's needed on Windows and not harmful on other platforms. 
- program = program_noeext + ".exe" - - filename = os.path.join(tmp_dir, program) - with open(filename, "wb"): - pass - os.chmod(filename, stat.S_IXUSR) - - # test path parameter - rv = find_executable(program, path=tmp_dir) - self.assertEqual(rv, filename) - - if sys.platform == 'win32': - # test without ".exe" extension - rv = find_executable(program_noeext, path=tmp_dir) - self.assertEqual(rv, filename) - - # test find in the current directory - with os_helper.change_cwd(tmp_dir): - rv = find_executable(program) - self.assertEqual(rv, program) - - # test non-existent program - dont_exist_program = "dontexist_" + program - rv = find_executable(dont_exist_program , path=tmp_dir) - self.assertIsNone(rv) - - # PATH='': no match, except in the current directory - with os_helper.EnvironmentVarGuard() as env: - env['PATH'] = '' - with unittest.mock.patch('distutils.spawn.os.confstr', - return_value=tmp_dir, create=True), \ - unittest.mock.patch('distutils.spawn.os.defpath', - tmp_dir): - rv = find_executable(program) - self.assertIsNone(rv) - - # look in current directory - with os_helper.change_cwd(tmp_dir): - rv = find_executable(program) - self.assertEqual(rv, program) - - # PATH=':': explicitly looks in the current directory - with os_helper.EnvironmentVarGuard() as env: - env['PATH'] = os.pathsep - with unittest.mock.patch('distutils.spawn.os.confstr', - return_value='', create=True), \ - unittest.mock.patch('distutils.spawn.os.defpath', ''): - rv = find_executable(program) - self.assertIsNone(rv) - - # look in current directory - with os_helper.change_cwd(tmp_dir): - rv = find_executable(program) - self.assertEqual(rv, program) - - # missing PATH: test os.confstr("CS_PATH") and os.defpath - with os_helper.EnvironmentVarGuard() as env: - env.pop('PATH', None) - - # without confstr - with unittest.mock.patch('distutils.spawn.os.confstr', - side_effect=ValueError, - create=True), \ - unittest.mock.patch('distutils.spawn.os.defpath', - tmp_dir): - rv = find_executable(program) - self.assertEqual(rv, filename) - - # with confstr - with unittest.mock.patch('distutils.spawn.os.confstr', - return_value=tmp_dir, create=True), \ - unittest.mock.patch('distutils.spawn.os.defpath', ''): - rv = find_executable(program) - self.assertEqual(rv, filename) - - def test_spawn_missing_exe(self): - with self.assertRaises(DistutilsExecError) as ctx: - spawn(['does-not-exist']) - self.assertIn("command 'does-not-exist' failed", str(ctx.exception)) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(SpawnTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_sysconfig.py b/Lib/distutils/tests/test_sysconfig.py deleted file mode 100644 index ae0eca897bc7bd..00000000000000 --- a/Lib/distutils/tests/test_sysconfig.py +++ /dev/null @@ -1,263 +0,0 @@ -"""Tests for distutils.sysconfig.""" -import contextlib -import os -import shutil -import subprocess -import sys -import textwrap -import unittest - -from distutils import sysconfig -from distutils.ccompiler import get_default_compiler -from distutils.tests import support -from test.support import run_unittest, swap_item, requires_subprocess, is_wasi -from test.support.os_helper import TESTFN - - -class SysconfigTestCase(support.EnvironGuard, unittest.TestCase): - def setUp(self): - super(SysconfigTestCase, self).setUp() - self.makefile = None - - def tearDown(self): - if self.makefile is not None: - os.unlink(self.makefile) - self.cleanup_testfn() - super(SysconfigTestCase, self).tearDown() - 
- def cleanup_testfn(self): - if os.path.isfile(TESTFN): - os.remove(TESTFN) - elif os.path.isdir(TESTFN): - shutil.rmtree(TESTFN) - - @unittest.skipIf(is_wasi, "Incompatible with WASI mapdir and OOT builds") - def test_get_config_h_filename(self): - config_h = sysconfig.get_config_h_filename() - self.assertTrue(os.path.isfile(config_h), config_h) - - def test_get_python_lib(self): - # XXX doesn't work on Linux when Python was never installed before - #self.assertTrue(os.path.isdir(lib_dir), lib_dir) - # test for pythonxx.lib? - self.assertNotEqual(sysconfig.get_python_lib(), - sysconfig.get_python_lib(prefix=TESTFN)) - - def test_get_config_vars(self): - cvars = sysconfig.get_config_vars() - self.assertIsInstance(cvars, dict) - self.assertTrue(cvars) - - @unittest.skipIf(is_wasi, "Incompatible with WASI mapdir and OOT builds") - def test_srcdir(self): - # See Issues #15322, #15364. - srcdir = sysconfig.get_config_var('srcdir') - - self.assertTrue(os.path.isabs(srcdir), srcdir) - self.assertTrue(os.path.isdir(srcdir), srcdir) - - if sysconfig.python_build: - # The python executable has not been installed so srcdir - # should be a full source checkout. - Python_h = os.path.join(srcdir, 'Include', 'Python.h') - self.assertTrue(os.path.exists(Python_h), Python_h) - # <srcdir>/PC/pyconfig.h always exists even if unused on POSIX. - pyconfig_h = os.path.join(srcdir, 'PC', 'pyconfig.h') - self.assertTrue(os.path.exists(pyconfig_h), pyconfig_h) - pyconfig_h_in = os.path.join(srcdir, 'pyconfig.h.in') - self.assertTrue(os.path.exists(pyconfig_h_in), pyconfig_h_in) - elif os.name == 'posix': - self.assertEqual( - os.path.dirname(sysconfig.get_makefile_filename()), - srcdir) - - def test_srcdir_independent_of_cwd(self): - # srcdir should be independent of the current working directory - # See Issues #15322, #15364. 
- srcdir = sysconfig.get_config_var('srcdir') - cwd = os.getcwd() - try: - os.chdir('..') - srcdir2 = sysconfig.get_config_var('srcdir') - finally: - os.chdir(cwd) - self.assertEqual(srcdir, srcdir2) - - def customize_compiler(self): - # make sure AR gets caught - class compiler: - compiler_type = 'unix' - - def set_executables(self, **kw): - self.exes = kw - - sysconfig_vars = { - 'AR': 'sc_ar', - 'CC': 'sc_cc', - 'CXX': 'sc_cxx', - 'ARFLAGS': '--sc-arflags', - 'CFLAGS': '--sc-cflags', - 'CCSHARED': '--sc-ccshared', - 'LDSHARED': 'sc_ldshared', - 'SHLIB_SUFFIX': 'sc_shutil_suffix', - - # On macOS, disable _osx_support.customize_compiler() - 'CUSTOMIZED_OSX_COMPILER': 'True', - } - - comp = compiler() - with contextlib.ExitStack() as cm: - for key, value in sysconfig_vars.items(): - cm.enter_context(swap_item(sysconfig._config_vars, key, value)) - sysconfig.customize_compiler(comp) - - return comp - - @unittest.skipUnless(get_default_compiler() == 'unix', - 'not testing if default compiler is not unix') - def test_customize_compiler(self): - # Make sure that sysconfig._config_vars is initialized - sysconfig.get_config_vars() - - os.environ['AR'] = 'env_ar' - os.environ['CC'] = 'env_cc' - os.environ['CPP'] = 'env_cpp' - os.environ['CXX'] = 'env_cxx --env-cxx-flags' - os.environ['LDSHARED'] = 'env_ldshared' - os.environ['LDFLAGS'] = '--env-ldflags' - os.environ['ARFLAGS'] = '--env-arflags' - os.environ['CFLAGS'] = '--env-cflags' - os.environ['CPPFLAGS'] = '--env-cppflags' - - comp = self.customize_compiler() - self.assertEqual(comp.exes['archiver'], - 'env_ar --env-arflags') - self.assertEqual(comp.exes['preprocessor'], - 'env_cpp --env-cppflags') - self.assertEqual(comp.exes['compiler'], - 'env_cc --sc-cflags --env-cflags --env-cppflags') - self.assertEqual(comp.exes['compiler_so'], - ('env_cc --sc-cflags ' - '--env-cflags ''--env-cppflags --sc-ccshared')) - self.assertEqual(comp.exes['compiler_cxx'], - 'env_cxx --env-cxx-flags') - self.assertEqual(comp.exes['linker_exe'], - 'env_cc') - self.assertEqual(comp.exes['linker_so'], - ('env_ldshared --env-ldflags --env-cflags' - ' --env-cppflags')) - self.assertEqual(comp.shared_lib_extension, 'sc_shutil_suffix') - - del os.environ['AR'] - del os.environ['CC'] - del os.environ['CPP'] - del os.environ['CXX'] - del os.environ['LDSHARED'] - del os.environ['LDFLAGS'] - del os.environ['ARFLAGS'] - del os.environ['CFLAGS'] - del os.environ['CPPFLAGS'] - - comp = self.customize_compiler() - self.assertEqual(comp.exes['archiver'], - 'sc_ar --sc-arflags') - self.assertEqual(comp.exes['preprocessor'], - 'sc_cc -E') - self.assertEqual(comp.exes['compiler'], - 'sc_cc --sc-cflags') - self.assertEqual(comp.exes['compiler_so'], - 'sc_cc --sc-cflags --sc-ccshared') - self.assertEqual(comp.exes['compiler_cxx'], - 'sc_cxx') - self.assertEqual(comp.exes['linker_exe'], - 'sc_cc') - self.assertEqual(comp.exes['linker_so'], - 'sc_ldshared') - self.assertEqual(comp.shared_lib_extension, 'sc_shutil_suffix') - - def test_parse_makefile_base(self): - self.makefile = TESTFN - fd = open(self.makefile, 'w') - try: - fd.write(r"CONFIG_ARGS= '--arg1=optarg1' 'ENV=LIB'" '\n') - fd.write('VAR=$OTHER\nOTHER=foo') - finally: - fd.close() - d = sysconfig.parse_makefile(self.makefile) - self.assertEqual(d, {'CONFIG_ARGS': "'--arg1=optarg1' 'ENV=LIB'", - 'OTHER': 'foo'}) - - def test_parse_makefile_literal_dollar(self): - self.makefile = TESTFN - fd = open(self.makefile, 'w') - try: - fd.write(r"CONFIG_ARGS= '--arg1=optarg1' 'ENV=\$$LIB'" '\n') - fd.write('VAR=$OTHER\nOTHER=foo') - 
finally: - fd.close() - d = sysconfig.parse_makefile(self.makefile) - self.assertEqual(d, {'CONFIG_ARGS': r"'--arg1=optarg1' 'ENV=\$LIB'", - 'OTHER': 'foo'}) - - - def test_sysconfig_module(self): - import sysconfig as global_sysconfig - self.assertEqual(global_sysconfig.get_config_var('CFLAGS'), - sysconfig.get_config_var('CFLAGS')) - self.assertEqual(global_sysconfig.get_config_var('LDFLAGS'), - sysconfig.get_config_var('LDFLAGS')) - - @unittest.skipIf(sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'), - 'compiler flags customized') - def test_sysconfig_compiler_vars(self): - # On OS X, binary installers support extension module building on - # various levels of the operating system with differing Xcode - # configurations. This requires customization of some of the - # compiler configuration directives to suit the environment on - # the installed machine. Some of these customizations may require - # running external programs and, so, are deferred until needed by - # the first extension module build. With Python 3.3, only - # the Distutils version of sysconfig is used for extension module - # builds, which happens earlier in the Distutils tests. This may - # cause the following tests to fail since no tests have caused - # the global version of sysconfig to call the customization yet. - # The solution for now is to simply skip this test in this case. - # The longer-term solution is to only have one version of sysconfig. - - import sysconfig as global_sysconfig - if sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'): - self.skipTest('compiler flags customized') - self.assertEqual(global_sysconfig.get_config_var('LDSHARED'), - sysconfig.get_config_var('LDSHARED')) - self.assertEqual(global_sysconfig.get_config_var('CC'), - sysconfig.get_config_var('CC')) - - @requires_subprocess() - def test_customize_compiler_before_get_config_vars(self): - # Issue #21923: test that a Distribution compiler - # instance can be called without an explicit call to - # get_config_vars(). - with open(TESTFN, 'w') as f: - f.writelines(textwrap.dedent('''\ - from distutils.core import Distribution - config = Distribution().get_command_obj('config') - # try_compile may pass or it may fail if no compiler - # is found but it should not raise an exception. 
- rc = config.try_compile('int x;') - ''')) - p = subprocess.Popen([str(sys.executable), TESTFN], - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - universal_newlines=True) - outs, errs = p.communicate() - self.assertEqual(0, p.returncode, "Subprocess failed: " + outs) - - -def test_suite(): - suite = unittest.TestSuite() - suite.addTest(unittest.TestLoader().loadTestsFromTestCase(SysconfigTestCase)) - return suite - - -if __name__ == '__main__': - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_text_file.py b/Lib/distutils/tests/test_text_file.py deleted file mode 100644 index ebac3d52f9097c..00000000000000 --- a/Lib/distutils/tests/test_text_file.py +++ /dev/null @@ -1,107 +0,0 @@ -"""Tests for distutils.text_file.""" -import os -import unittest -from distutils.text_file import TextFile -from distutils.tests import support -from test.support import run_unittest - -TEST_DATA = """# test file - -line 3 \\ -# intervening comment - continues on next line -""" - -class TextFileTestCase(support.TempdirManager, unittest.TestCase): - - def test_class(self): - # old tests moved from text_file.__main__ - # so they are really called by the buildbots - - # result 1: no fancy options - result1 = ['# test file\n', '\n', 'line 3 \\\n', - '# intervening comment\n', - ' continues on next line\n'] - - # result 2: just strip comments - result2 = ["\n", - "line 3 \\\n", - " continues on next line\n"] - - # result 3: just strip blank lines - result3 = ["# test file\n", - "line 3 \\\n", - "# intervening comment\n", - " continues on next line\n"] - - # result 4: default, strip comments, blank lines, - # and trailing whitespace - result4 = ["line 3 \\", - " continues on next line"] - - # result 5: strip comments and blanks, plus join lines (but don't - # "collapse" joined lines - result5 = ["line 3 continues on next line"] - - # result 6: strip comments and blanks, plus join lines (and - # "collapse" joined lines - result6 = ["line 3 continues on next line"] - - def test_input(count, description, file, expected_result): - result = file.readlines() - self.assertEqual(result, expected_result) - - tmpdir = self.mkdtemp() - filename = os.path.join(tmpdir, "test.txt") - out_file = open(filename, "w") - try: - out_file.write(TEST_DATA) - finally: - out_file.close() - - in_file = TextFile(filename, strip_comments=0, skip_blanks=0, - lstrip_ws=0, rstrip_ws=0) - try: - test_input(1, "no processing", in_file, result1) - finally: - in_file.close() - - in_file = TextFile(filename, strip_comments=1, skip_blanks=0, - lstrip_ws=0, rstrip_ws=0) - try: - test_input(2, "strip comments", in_file, result2) - finally: - in_file.close() - - in_file = TextFile(filename, strip_comments=0, skip_blanks=1, - lstrip_ws=0, rstrip_ws=0) - try: - test_input(3, "strip blanks", in_file, result3) - finally: - in_file.close() - - in_file = TextFile(filename) - try: - test_input(4, "default processing", in_file, result4) - finally: - in_file.close() - - in_file = TextFile(filename, strip_comments=1, skip_blanks=1, - join_lines=1, rstrip_ws=1) - try: - test_input(5, "join lines without collapsing", in_file, result5) - finally: - in_file.close() - - in_file = TextFile(filename, strip_comments=1, skip_blanks=1, - join_lines=1, rstrip_ws=1, collapse_join=1) - try: - test_input(6, "join lines with collapsing", in_file, result6) - finally: - in_file.close() - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(TextFileTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git 
a/Lib/distutils/tests/test_unixccompiler.py b/Lib/distutils/tests/test_unixccompiler.py deleted file mode 100644 index a3484d4f94cd92..00000000000000 --- a/Lib/distutils/tests/test_unixccompiler.py +++ /dev/null @@ -1,145 +0,0 @@ -"""Tests for distutils.unixccompiler.""" -import sys -import unittest -from test.support import run_unittest -from test.support.os_helper import EnvironmentVarGuard - -from distutils import sysconfig -from distutils.unixccompiler import UnixCCompiler - -class UnixCCompilerTestCase(unittest.TestCase): - - def setUp(self): - self._backup_platform = sys.platform - self._backup_get_config_var = sysconfig.get_config_var - self._backup_config_vars = dict(sysconfig._config_vars) - class CompilerWrapper(UnixCCompiler): - def rpath_foo(self): - return self.runtime_library_dir_option('/foo') - self.cc = CompilerWrapper() - - def tearDown(self): - sys.platform = self._backup_platform - sysconfig.get_config_var = self._backup_get_config_var - sysconfig._config_vars.clear() - sysconfig._config_vars.update(self._backup_config_vars) - - @unittest.skipIf(sys.platform == 'win32', "can't test on Windows") - def test_runtime_libdir_option(self): - # Issue#5900 - # - # Ensure RUNPATH is added to extension modules with RPATH if - # GNU ld is used - - # darwin - sys.platform = 'darwin' - self.assertEqual(self.cc.rpath_foo(), '-L/foo') - - # hp-ux - sys.platform = 'hp-ux' - old_gcv = sysconfig.get_config_var - def gcv(v): - return 'xxx' - sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), ['+s', '-L/foo']) - - def gcv(v): - return 'gcc' - sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo']) - - def gcv(v): - return 'g++' - sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo']) - - sysconfig.get_config_var = old_gcv - - # GCC GNULD - sys.platform = 'bar' - def gcv(v): - if v == 'CC': - return 'gcc' - elif v == 'GNULD': - return 'yes' - sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo') - - # GCC non-GNULD - sys.platform = 'bar' - def gcv(v): - if v == 'CC': - return 'gcc' - elif v == 'GNULD': - return 'no' - sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), '-Wl,-R/foo') - - # GCC GNULD with fully qualified configuration prefix - # see #7617 - sys.platform = 'bar' - def gcv(v): - if v == 'CC': - return 'x86_64-pc-linux-gnu-gcc-4.4.2' - elif v == 'GNULD': - return 'yes' - sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo') - - # non-GCC GNULD - sys.platform = 'bar' - def gcv(v): - if v == 'CC': - return 'cc' - elif v == 'GNULD': - return 'yes' - sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), '-R/foo') - - # non-GCC non-GNULD - sys.platform = 'bar' - def gcv(v): - if v == 'CC': - return 'cc' - elif v == 'GNULD': - return 'no' - sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), '-R/foo') - - @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for OS X') - def test_osx_cc_overrides_ldshared(self): - # Issue #18080: - # ensure that setting CC env variable also changes default linker - def gcv(v): - if v == 'LDSHARED': - return 'gcc-4.2 -bundle -undefined dynamic_lookup ' - return 'gcc-4.2' - sysconfig.get_config_var = gcv - with EnvironmentVarGuard() as env: - env['CC'] = 'my_cc' - del env['LDSHARED'] - sysconfig.customize_compiler(self.cc) - self.assertEqual(self.cc.linker_so[0], 'my_cc') - - 
@unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for OS X') - def test_osx_explicit_ldshared(self): - # Issue #18080: - # ensure that setting CC env variable does not change - # explicit LDSHARED setting for linker - def gcv(v): - if v == 'LDSHARED': - return 'gcc-4.2 -bundle -undefined dynamic_lookup ' - return 'gcc-4.2' - sysconfig.get_config_var = gcv - with EnvironmentVarGuard() as env: - env['CC'] = 'my_cc' - env['LDSHARED'] = 'my_ld -bundle -dynamic' - sysconfig.customize_compiler(self.cc) - self.assertEqual(self.cc.linker_so[0], 'my_ld') - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(UnixCCompilerTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_upload.py b/Lib/distutils/tests/test_upload.py deleted file mode 100644 index d6797414883060..00000000000000 --- a/Lib/distutils/tests/test_upload.py +++ /dev/null @@ -1,223 +0,0 @@ -"""Tests for distutils.command.upload.""" -import os -import unittest -import unittest.mock as mock -from urllib.error import HTTPError - -from test.support import run_unittest - -from distutils.command import upload as upload_mod -from distutils.command.upload import upload -from distutils.core import Distribution -from distutils.errors import DistutilsError -from distutils.log import ERROR, INFO - -from distutils.tests.test_config import PYPIRC, BasePyPIRCCommandTestCase - -PYPIRC_LONG_PASSWORD = """\ -[distutils] - -index-servers = - server1 - server2 - -[server1] -username:me -password:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa - -[server2] -username:meagain -password: secret -realm:acme -repository:http://another.pypi/ -""" - - -PYPIRC_NOPASSWORD = """\ -[distutils] - -index-servers = - server1 - -[server1] -username:me -""" - -class FakeOpen(object): - - def __init__(self, url, msg=None, code=None): - self.url = url - if not isinstance(url, str): - self.req = url - else: - self.req = None - self.msg = msg or 'OK' - self.code = code or 200 - - def getheader(self, name, default=None): - return { - 'content-type': 'text/plain; charset=utf-8', - }.get(name.lower(), default) - - def read(self): - return b'xyzzy' - - def getcode(self): - return self.code - - -class uploadTestCase(BasePyPIRCCommandTestCase): - - def setUp(self): - super(uploadTestCase, self).setUp() - self.old_open = upload_mod.urlopen - upload_mod.urlopen = self._urlopen - self.last_open = None - self.next_msg = None - self.next_code = None - - def tearDown(self): - upload_mod.urlopen = self.old_open - super(uploadTestCase, self).tearDown() - - def _urlopen(self, url): - self.last_open = FakeOpen(url, msg=self.next_msg, code=self.next_code) - return self.last_open - - def test_finalize_options(self): - - # new format - self.write_file(self.rc, PYPIRC) - dist = Distribution() - cmd = upload(dist) - cmd.finalize_options() - for attr, waited in (('username', 'me'), ('password', 'secret'), - ('realm', 'pypi'), - ('repository', 'https://upload.pypi.org/legacy/')): - self.assertEqual(getattr(cmd, attr), waited) - - def test_saved_password(self): - # file with no password - self.write_file(self.rc, PYPIRC_NOPASSWORD) - - # make sure it passes - dist = Distribution() - cmd = upload(dist) - cmd.finalize_options() - self.assertEqual(cmd.password, None) - - # make sure we get it as well, if another command - # initialized it at the dist level - dist.password = 'xxx' - cmd = upload(dist) - cmd.finalize_options() - self.assertEqual(cmd.password, 'xxx') - - def test_upload(self): - 
tmp = self.mkdtemp() - path = os.path.join(tmp, 'xxx') - self.write_file(path) - command, pyversion, filename = 'xxx', '2.6', path - dist_files = [(command, pyversion, filename)] - self.write_file(self.rc, PYPIRC_LONG_PASSWORD) - - # lets run it - pkg_dir, dist = self.create_dist(dist_files=dist_files) - cmd = upload(dist) - cmd.show_response = 1 - cmd.ensure_finalized() - cmd.run() - - # what did we send ? - headers = dict(self.last_open.req.headers) - self.assertGreaterEqual(int(headers['Content-length']), 2162) - content_type = headers['Content-type'] - self.assertTrue(content_type.startswith('multipart/form-data')) - self.assertEqual(self.last_open.req.get_method(), 'POST') - expected_url = 'https://upload.pypi.org/legacy/' - self.assertEqual(self.last_open.req.get_full_url(), expected_url) - data = self.last_open.req.data - self.assertIn(b'xxx',data) - self.assertIn(b'protocol_version', data) - self.assertIn(b'sha256_digest', data) - self.assertIn( - b'cd2eb0837c9b4c962c22d2ff8b5441b7b45805887f051d39bf133b583baf' - b'6860', - data - ) - if b'md5_digest' in data: - self.assertIn(b'f561aaf6ef0bf14d4208bb46a4ccb3ad', data) - if b'blake2_256_digest' in data: - self.assertIn( - b'b6f289a27d4fe90da63c503bfe0a9b761a8f76bb86148565065f040be' - b'6d1c3044cf7ded78ef800509bccb4b648e507d88dc6383d67642aadcc' - b'ce443f1534330a', - data - ) - - # The PyPI response body was echoed - results = self.get_logs(INFO) - self.assertEqual(results[-1], 75 * '-' + '\nxyzzy\n' + 75 * '-') - - # bpo-32304: archives whose last byte was b'\r' were corrupted due to - # normalization intended for Mac OS 9. - def test_upload_correct_cr(self): - # content that ends with \r should not be modified. - tmp = self.mkdtemp() - path = os.path.join(tmp, 'xxx') - self.write_file(path, content='yy\r') - command, pyversion, filename = 'xxx', '2.6', path - dist_files = [(command, pyversion, filename)] - self.write_file(self.rc, PYPIRC_LONG_PASSWORD) - - # other fields that ended with \r used to be modified, now are - # preserved. 
- pkg_dir, dist = self.create_dist( - dist_files=dist_files, - description='long description\r' - ) - cmd = upload(dist) - cmd.show_response = 1 - cmd.ensure_finalized() - cmd.run() - - headers = dict(self.last_open.req.headers) - self.assertGreaterEqual(int(headers['Content-length']), 2172) - self.assertIn(b'long description\r', self.last_open.req.data) - - def test_upload_fails(self): - self.next_msg = "Not Found" - self.next_code = 404 - self.assertRaises(DistutilsError, self.test_upload) - - def test_wrong_exception_order(self): - tmp = self.mkdtemp() - path = os.path.join(tmp, 'xxx') - self.write_file(path) - dist_files = [('xxx', '2.6', path)] # command, pyversion, filename - self.write_file(self.rc, PYPIRC_LONG_PASSWORD) - - pkg_dir, dist = self.create_dist(dist_files=dist_files) - tests = [ - (OSError('oserror'), 'oserror', OSError), - (HTTPError('url', 400, 'httperror', {}, None), - 'Upload failed (400): httperror', DistutilsError), - ] - for exception, expected, raised_exception in tests: - with self.subTest(exception=type(exception).__name__): - with mock.patch('distutils.command.upload.urlopen', - new=mock.Mock(side_effect=exception)): - with self.assertRaises(raised_exception): - cmd = upload(dist) - cmd.ensure_finalized() - cmd.run() - results = self.get_logs(ERROR) - self.assertIn(expected, results[-1]) - self.clear_logs() - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(uploadTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_util.py b/Lib/distutils/tests/test_util.py deleted file mode 100644 index f9c223f06ec68d..00000000000000 --- a/Lib/distutils/tests/test_util.py +++ /dev/null @@ -1,313 +0,0 @@ -"""Tests for distutils.util.""" -import os -import sys -import unittest -from copy import copy -from test.support import run_unittest -from unittest import mock - -from distutils.errors import DistutilsPlatformError, DistutilsByteCompileError -from distutils.util import (get_platform, convert_path, change_root, - check_environ, split_quoted, strtobool, - rfc822_escape, byte_compile, - grok_environment_error) -from distutils import util # used to patch _environ_checked -from distutils.sysconfig import get_config_vars -from distutils import sysconfig -from distutils.tests import support -import _osx_support - -class UtilTestCase(support.EnvironGuard, unittest.TestCase): - - def setUp(self): - super(UtilTestCase, self).setUp() - # saving the environment - self.name = os.name - self.platform = sys.platform - self.version = sys.version - self.sep = os.sep - self.join = os.path.join - self.isabs = os.path.isabs - self.splitdrive = os.path.splitdrive - self._config_vars = copy(sysconfig._config_vars) - - # patching os.uname - if hasattr(os, 'uname'): - self.uname = os.uname - self._uname = os.uname() - else: - self.uname = None - self._uname = None - - os.uname = self._get_uname - - def tearDown(self): - # getting back the environment - os.name = self.name - sys.platform = self.platform - sys.version = self.version - os.sep = self.sep - os.path.join = self.join - os.path.isabs = self.isabs - os.path.splitdrive = self.splitdrive - if self.uname is not None: - os.uname = self.uname - else: - del os.uname - sysconfig._config_vars.clear() - sysconfig._config_vars.update(self._config_vars) - super(UtilTestCase, self).tearDown() - - def _set_uname(self, uname): - self._uname = uname - - def _get_uname(self): - return self._uname - - def test_get_platform(self): - - # windows XP, 32bits - os.name = 'nt' - 
sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) ' - '[MSC v.1310 32 bit (Intel)]') - sys.platform = 'win32' - self.assertEqual(get_platform(), 'win32') - - # windows XP, amd64 - os.name = 'nt' - sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) ' - '[MSC v.1310 32 bit (Amd64)]') - sys.platform = 'win32' - self.assertEqual(get_platform(), 'win-amd64') - - # macbook - os.name = 'posix' - sys.version = ('2.5 (r25:51918, Sep 19 2006, 08:49:13) ' - '\n[GCC 4.0.1 (Apple Computer, Inc. build 5341)]') - sys.platform = 'darwin' - self._set_uname(('Darwin', 'macziade', '8.11.1', - ('Darwin Kernel Version 8.11.1: ' - 'Wed Oct 10 18:23:28 PDT 2007; ' - 'root:xnu-792.25.20~1/RELEASE_I386'), 'i386')) - _osx_support._remove_original_values(get_config_vars()) - get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.3' - - get_config_vars()['CFLAGS'] = ('-fno-strict-aliasing -DNDEBUG -g ' - '-fwrapv -O3 -Wall -Wstrict-prototypes') - - cursize = sys.maxsize - sys.maxsize = (2 ** 31)-1 - try: - self.assertEqual(get_platform(), 'macosx-10.3-i386') - finally: - sys.maxsize = cursize - - # macbook with fat binaries (fat, universal or fat64) - _osx_support._remove_original_values(get_config_vars()) - get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.4' - get_config_vars()['CFLAGS'] = ('-arch ppc -arch i386 -isysroot ' - '/Developer/SDKs/MacOSX10.4u.sdk ' - '-fno-strict-aliasing -fno-common ' - '-dynamic -DNDEBUG -g -O3') - - self.assertEqual(get_platform(), 'macosx-10.4-fat') - - _osx_support._remove_original_values(get_config_vars()) - os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.1' - self.assertEqual(get_platform(), 'macosx-10.4-fat') - - - _osx_support._remove_original_values(get_config_vars()) - get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch i386 -isysroot ' - '/Developer/SDKs/MacOSX10.4u.sdk ' - '-fno-strict-aliasing -fno-common ' - '-dynamic -DNDEBUG -g -O3') - - self.assertEqual(get_platform(), 'macosx-10.4-intel') - - _osx_support._remove_original_values(get_config_vars()) - get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch ppc -arch i386 -isysroot ' - '/Developer/SDKs/MacOSX10.4u.sdk ' - '-fno-strict-aliasing -fno-common ' - '-dynamic -DNDEBUG -g -O3') - self.assertEqual(get_platform(), 'macosx-10.4-fat3') - - _osx_support._remove_original_values(get_config_vars()) - get_config_vars()['CFLAGS'] = ('-arch ppc64 -arch x86_64 -arch ppc -arch i386 -isysroot ' - '/Developer/SDKs/MacOSX10.4u.sdk ' - '-fno-strict-aliasing -fno-common ' - '-dynamic -DNDEBUG -g -O3') - self.assertEqual(get_platform(), 'macosx-10.4-universal') - - _osx_support._remove_original_values(get_config_vars()) - get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch ppc64 -isysroot ' - '/Developer/SDKs/MacOSX10.4u.sdk ' - '-fno-strict-aliasing -fno-common ' - '-dynamic -DNDEBUG -g -O3') - - self.assertEqual(get_platform(), 'macosx-10.4-fat64') - - for arch in ('ppc', 'i386', 'x86_64', 'ppc64'): - _osx_support._remove_original_values(get_config_vars()) - get_config_vars()['CFLAGS'] = ('-arch %s -isysroot ' - '/Developer/SDKs/MacOSX10.4u.sdk ' - '-fno-strict-aliasing -fno-common ' - '-dynamic -DNDEBUG -g -O3'%(arch,)) - - self.assertEqual(get_platform(), 'macosx-10.4-%s'%(arch,)) - - - # linux debian sarge - os.name = 'posix' - sys.version = ('2.3.5 (#1, Jul 4 2007, 17:28:59) ' - '\n[GCC 4.1.2 20061115 (prerelease) (Debian 4.1.1-21)]') - sys.platform = 'linux2' - self._set_uname(('Linux', 'aglae', '2.6.21.1dedibox-r7', - '#1 Mon Apr 30 17:25:38 CEST 2007', 'i686')) - - self.assertEqual(get_platform(), 'linux-i686') - - # XXX more 
platforms to tests here - - def test_convert_path(self): - # linux/mac - os.sep = '/' - def _join(path): - return '/'.join(path) - os.path.join = _join - - self.assertEqual(convert_path('/home/to/my/stuff'), - '/home/to/my/stuff') - - # win - os.sep = '\\' - def _join(*path): - return '\\'.join(path) - os.path.join = _join - - self.assertRaises(ValueError, convert_path, '/home/to/my/stuff') - self.assertRaises(ValueError, convert_path, 'home/to/my/stuff/') - - self.assertEqual(convert_path('home/to/my/stuff'), - 'home\\to\\my\\stuff') - self.assertEqual(convert_path('.'), - os.curdir) - - def test_change_root(self): - # linux/mac - os.name = 'posix' - def _isabs(path): - return path[0] == '/' - os.path.isabs = _isabs - def _join(*path): - return '/'.join(path) - os.path.join = _join - - self.assertEqual(change_root('/root', '/old/its/here'), - '/root/old/its/here') - self.assertEqual(change_root('/root', 'its/here'), - '/root/its/here') - - # windows - os.name = 'nt' - def _isabs(path): - return path.startswith('c:\\') - os.path.isabs = _isabs - def _splitdrive(path): - if path.startswith('c:'): - return ('', path.replace('c:', '')) - return ('', path) - os.path.splitdrive = _splitdrive - def _join(*path): - return '\\'.join(path) - os.path.join = _join - - self.assertEqual(change_root('c:\\root', 'c:\\old\\its\\here'), - 'c:\\root\\old\\its\\here') - self.assertEqual(change_root('c:\\root', 'its\\here'), - 'c:\\root\\its\\here') - - # BugsBunny os (it's a great os) - os.name = 'BugsBunny' - self.assertRaises(DistutilsPlatformError, - change_root, 'c:\\root', 'its\\here') - - # XXX platforms to be covered: mac - - def test_check_environ(self): - util._environ_checked = 0 - os.environ.pop('HOME', None) - - check_environ() - - self.assertEqual(os.environ['PLAT'], get_platform()) - self.assertEqual(util._environ_checked, 1) - - @unittest.skipUnless(os.name == 'posix', 'specific to posix') - def test_check_environ_getpwuid(self): - util._environ_checked = 0 - os.environ.pop('HOME', None) - - try: - import pwd - except ImportError: - raise unittest.SkipTest("Test requires pwd module.") - - # only set pw_dir field, other fields are not used - result = pwd.struct_passwd((None, None, None, None, None, - '/home/distutils', None)) - with mock.patch.object(pwd, 'getpwuid', return_value=result): - check_environ() - self.assertEqual(os.environ['HOME'], '/home/distutils') - - util._environ_checked = 0 - os.environ.pop('HOME', None) - - # bpo-10496: Catch pwd.getpwuid() error - with mock.patch.object(pwd, 'getpwuid', side_effect=KeyError): - check_environ() - self.assertNotIn('HOME', os.environ) - - def test_split_quoted(self): - self.assertEqual(split_quoted('""one"" "two" \'three\' \\four'), - ['one', 'two', 'three', 'four']) - - def test_strtobool(self): - yes = ('y', 'Y', 'yes', 'True', 't', 'true', 'True', 'On', 'on', '1') - no = ('n', 'no', 'f', 'false', 'off', '0', 'Off', 'No', 'N') - - for y in yes: - self.assertTrue(strtobool(y)) - - for n in no: - self.assertFalse(strtobool(n)) - - def test_rfc822_escape(self): - header = 'I am a\npoor\nlonesome\nheader\n' - res = rfc822_escape(header) - wanted = ('I am a%(8s)spoor%(8s)slonesome%(8s)s' - 'header%(8s)s') % {'8s': '\n'+8*' '} - self.assertEqual(res, wanted) - - def test_dont_write_bytecode(self): - # makes sure byte_compile raise a DistutilsError - # if sys.dont_write_bytecode is True - old_dont_write_bytecode = sys.dont_write_bytecode - sys.dont_write_bytecode = True - try: - self.assertRaises(DistutilsByteCompileError, byte_compile, []) - 
finally: - sys.dont_write_bytecode = old_dont_write_bytecode - - def test_grok_environment_error(self): - # test obsolete function to ensure backward compat (#4931) - exc = IOError("Unable to find batch file") - msg = grok_environment_error(exc) - self.assertEqual(msg, "error: Unable to find batch file") - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(UtilTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_version.py b/Lib/distutils/tests/test_version.py deleted file mode 100644 index 1563e0227b60dd..00000000000000 --- a/Lib/distutils/tests/test_version.py +++ /dev/null @@ -1,87 +0,0 @@ -"""Tests for distutils.version.""" -import unittest -from distutils.version import LooseVersion -from distutils.version import StrictVersion -from test.support import run_unittest - -class VersionTestCase(unittest.TestCase): - - def test_prerelease(self): - version = StrictVersion('1.2.3a1') - self.assertEqual(version.version, (1, 2, 3)) - self.assertEqual(version.prerelease, ('a', 1)) - self.assertEqual(str(version), '1.2.3a1') - - version = StrictVersion('1.2.0') - self.assertEqual(str(version), '1.2') - - def test_cmp_strict(self): - versions = (('1.5.1', '1.5.2b2', -1), - ('161', '3.10a', ValueError), - ('8.02', '8.02', 0), - ('3.4j', '1996.07.12', ValueError), - ('3.2.pl0', '3.1.1.6', ValueError), - ('2g6', '11g', ValueError), - ('0.9', '2.2', -1), - ('1.2.1', '1.2', 1), - ('1.1', '1.2.2', -1), - ('1.2', '1.1', 1), - ('1.2.1', '1.2.2', -1), - ('1.2.2', '1.2', 1), - ('1.2', '1.2.2', -1), - ('0.4.0', '0.4', 0), - ('1.13++', '5.5.kw', ValueError)) - - for v1, v2, wanted in versions: - try: - res = StrictVersion(v1)._cmp(StrictVersion(v2)) - except ValueError: - if wanted is ValueError: - continue - else: - raise AssertionError(("cmp(%s, %s) " - "shouldn't raise ValueError") - % (v1, v2)) - self.assertEqual(res, wanted, - 'cmp(%s, %s) should be %s, got %s' % - (v1, v2, wanted, res)) - res = StrictVersion(v1)._cmp(v2) - self.assertEqual(res, wanted, - 'cmp(%s, %s) should be %s, got %s' % - (v1, v2, wanted, res)) - res = StrictVersion(v1)._cmp(object()) - self.assertIs(res, NotImplemented, - 'cmp(%s, %s) should be NotImplemented, got %s' % - (v1, v2, res)) - - - def test_cmp(self): - versions = (('1.5.1', '1.5.2b2', -1), - ('161', '3.10a', 1), - ('8.02', '8.02', 0), - ('3.4j', '1996.07.12', -1), - ('3.2.pl0', '3.1.1.6', 1), - ('2g6', '11g', -1), - ('0.960923', '2.2beta29', -1), - ('1.13++', '5.5.kw', -1)) - - - for v1, v2, wanted in versions: - res = LooseVersion(v1)._cmp(LooseVersion(v2)) - self.assertEqual(res, wanted, - 'cmp(%s, %s) should be %s, got %s' % - (v1, v2, wanted, res)) - res = LooseVersion(v1)._cmp(v2) - self.assertEqual(res, wanted, - 'cmp(%s, %s) should be %s, got %s' % - (v1, v2, wanted, res)) - res = LooseVersion(v1)._cmp(object()) - self.assertIs(res, NotImplemented, - 'cmp(%s, %s) should be NotImplemented, got %s' % - (v1, v2, res)) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(VersionTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_versionpredicate.py b/Lib/distutils/tests/test_versionpredicate.py deleted file mode 100644 index 28ae09dc2058da..00000000000000 --- a/Lib/distutils/tests/test_versionpredicate.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Tests harness for distutils.versionpredicate. 
- -""" - -import distutils.versionpredicate -import doctest -from test.support import run_unittest - -def test_suite(): - return doctest.DocTestSuite(distutils.versionpredicate) - -if __name__ == '__main__': - run_unittest(test_suite()) diff --git a/Lib/distutils/text_file.py b/Lib/distutils/text_file.py deleted file mode 100644 index 93abad38f434d7..00000000000000 --- a/Lib/distutils/text_file.py +++ /dev/null @@ -1,286 +0,0 @@ -"""text_file - -provides the TextFile class, which gives an interface to text files -that (optionally) takes care of stripping comments, ignoring blank -lines, and joining lines with backslashes.""" - -import sys, io - - -class TextFile: - """Provides a file-like object that takes care of all the things you - commonly want to do when processing a text file that has some - line-by-line syntax: strip comments (as long as "#" is your - comment character), skip blank lines, join adjacent lines by - escaping the newline (ie. backslash at end of line), strip - leading and/or trailing whitespace. All of these are optional - and independently controllable. - - Provides a 'warn()' method so you can generate warning messages that - report physical line number, even if the logical line in question - spans multiple physical lines. Also provides 'unreadline()' for - implementing line-at-a-time lookahead. - - Constructor is called as: - - TextFile (filename=None, file=None, **options) - - It bombs (RuntimeError) if both 'filename' and 'file' are None; - 'filename' should be a string, and 'file' a file object (or - something that provides 'readline()' and 'close()' methods). It is - recommended that you supply at least 'filename', so that TextFile - can include it in warning messages. If 'file' is not supplied, - TextFile creates its own using 'io.open()'. - - The options are all boolean, and affect the value returned by - 'readline()': - strip_comments [default: true] - strip from "#" to end-of-line, as well as any whitespace - leading up to the "#" -- unless it is escaped by a backslash - lstrip_ws [default: false] - strip leading whitespace from each line before returning it - rstrip_ws [default: true] - strip trailing whitespace (including line terminator!) from - each line before returning it - skip_blanks [default: true} - skip lines that are empty *after* stripping comments and - whitespace. (If both lstrip_ws and rstrip_ws are false, - then some lines may consist of solely whitespace: these will - *not* be skipped, even if 'skip_blanks' is true.) - join_lines [default: false] - if a backslash is the last non-newline character on a line - after stripping comments and whitespace, join the following line - to it to form one "logical line"; if N consecutive lines end - with a backslash, then N+1 physical lines will be joined to - form one logical line. - collapse_join [default: false] - strip leading whitespace from lines that are joined to their - predecessor; only matters if (join_lines and not lstrip_ws) - errors [default: 'strict'] - error handler used to decode the file content - - Note that since 'rstrip_ws' can strip the trailing newline, the - semantics of 'readline()' must differ from those of the builtin file - object's 'readline()' method! 
In particular, 'readline()' returns - None for end-of-file: an empty string might just be a blank line (or - an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is - not.""" - - default_options = { 'strip_comments': 1, - 'skip_blanks': 1, - 'lstrip_ws': 0, - 'rstrip_ws': 1, - 'join_lines': 0, - 'collapse_join': 0, - 'errors': 'strict', - } - - def __init__(self, filename=None, file=None, **options): - """Construct a new TextFile object. At least one of 'filename' - (a string) and 'file' (a file-like object) must be supplied. - They keyword argument options are described above and affect - the values returned by 'readline()'.""" - if filename is None and file is None: - raise RuntimeError("you must supply either or both of 'filename' and 'file'") - - # set values for all options -- either from client option hash - # or fallback to default_options - for opt in self.default_options.keys(): - if opt in options: - setattr(self, opt, options[opt]) - else: - setattr(self, opt, self.default_options[opt]) - - # sanity check client option hash - for opt in options.keys(): - if opt not in self.default_options: - raise KeyError("invalid TextFile option '%s'" % opt) - - if file is None: - self.open(filename) - else: - self.filename = filename - self.file = file - self.current_line = 0 # assuming that file is at BOF! - - # 'linebuf' is a stack of lines that will be emptied before we - # actually read from the file; it's only populated by an - # 'unreadline()' operation - self.linebuf = [] - - def open(self, filename): - """Open a new file named 'filename'. This overrides both the - 'filename' and 'file' arguments to the constructor.""" - self.filename = filename - self.file = io.open(self.filename, 'r', errors=self.errors) - self.current_line = 0 - - def close(self): - """Close the current file and forget everything we know about it - (filename, current line number).""" - file = self.file - self.file = None - self.filename = None - self.current_line = None - file.close() - - def gen_error(self, msg, line=None): - outmsg = [] - if line is None: - line = self.current_line - outmsg.append(self.filename + ", ") - if isinstance(line, (list, tuple)): - outmsg.append("lines %d-%d: " % tuple(line)) - else: - outmsg.append("line %d: " % line) - outmsg.append(str(msg)) - return "".join(outmsg) - - def error(self, msg, line=None): - raise ValueError("error: " + self.gen_error(msg, line)) - - def warn(self, msg, line=None): - """Print (to stderr) a warning message tied to the current logical - line in the current file. If the current logical line in the - file spans multiple physical lines, the warning refers to the - whole range, eg. "lines 3-5". If 'line' supplied, it overrides - the current line number; it may be a list or tuple to indicate a - range of physical lines, or an integer for a single physical - line.""" - sys.stderr.write("warning: " + self.gen_error(msg, line) + "\n") - - def readline(self): - """Read and return a single logical line from the current file (or - from an internal buffer if lines have previously been "unread" - with 'unreadline()'). If the 'join_lines' option is true, this - may involve reading multiple physical lines concatenated into a - single string. Updates the current line number, so calling - 'warn()' after 'readline()' emits a warning about the physical - line(s) just read. Returns None on end-of-file, since the empty - string can occur if 'rstrip_ws' is true but 'strip_blanks' is - not.""" - # If any "unread" lines waiting in 'linebuf', return the top - # one. 
(We don't actually buffer read-ahead data -- lines only - # get put in 'linebuf' if the client explicitly does an - # 'unreadline()'. - if self.linebuf: - line = self.linebuf[-1] - del self.linebuf[-1] - return line - - buildup_line = '' - - while True: - # read the line, make it None if EOF - line = self.file.readline() - if line == '': - line = None - - if self.strip_comments and line: - - # Look for the first "#" in the line. If none, never - # mind. If we find one and it's the first character, or - # is not preceded by "\", then it starts a comment -- - # strip the comment, strip whitespace before it, and - # carry on. Otherwise, it's just an escaped "#", so - # unescape it (and any other escaped "#"'s that might be - # lurking in there) and otherwise leave the line alone. - - pos = line.find("#") - if pos == -1: # no "#" -- no comments - pass - - # It's definitely a comment -- either "#" is the first - # character, or it's elsewhere and unescaped. - elif pos == 0 or line[pos-1] != "\\": - # Have to preserve the trailing newline, because it's - # the job of a later step (rstrip_ws) to remove it -- - # and if rstrip_ws is false, we'd better preserve it! - # (NB. this means that if the final line is all comment - # and has no trailing newline, we will think that it's - # EOF; I think that's OK.) - eol = (line[-1] == '\n') and '\n' or '' - line = line[0:pos] + eol - - # If all that's left is whitespace, then skip line - # *now*, before we try to join it to 'buildup_line' -- - # that way constructs like - # hello \\ - # # comment that should be ignored - # there - # result in "hello there". - if line.strip() == "": - continue - else: # it's an escaped "#" - line = line.replace("\\#", "#") - - # did previous line end with a backslash? then accumulate - if self.join_lines and buildup_line: - # oops: end of file - if line is None: - self.warn("continuation line immediately precedes " - "end-of-file") - return buildup_line - - if self.collapse_join: - line = line.lstrip() - line = buildup_line + line - - # careful: pay attention to line number when incrementing it - if isinstance(self.current_line, list): - self.current_line[1] = self.current_line[1] + 1 - else: - self.current_line = [self.current_line, - self.current_line + 1] - # just an ordinary line, read it as usual - else: - if line is None: # eof - return None - - # still have to be careful about incrementing the line number! - if isinstance(self.current_line, list): - self.current_line = self.current_line[1] + 1 - else: - self.current_line = self.current_line + 1 - - # strip whitespace however the client wants (leading and - # trailing, or one or the other, or neither) - if self.lstrip_ws and self.rstrip_ws: - line = line.strip() - elif self.lstrip_ws: - line = line.lstrip() - elif self.rstrip_ws: - line = line.rstrip() - - # blank line (whether we rstrip'ed or not)? 
skip to next line - # if appropriate - if (line == '' or line == '\n') and self.skip_blanks: - continue - - if self.join_lines: - if line[-1] == '\\': - buildup_line = line[:-1] - continue - - if line[-2:] == '\\\n': - buildup_line = line[0:-2] + '\n' - continue - - # well, I guess there's some actual content there: return it - return line - - def readlines(self): - """Read and return the list of all logical lines remaining in the - current file.""" - lines = [] - while True: - line = self.readline() - if line is None: - return lines - lines.append(line) - - def unreadline(self, line): - """Push 'line' (a string) onto an internal buffer that will be - checked by future 'readline()' calls. Handy for implementing - a parser with line-at-a-time lookahead.""" - self.linebuf.append(line) diff --git a/Lib/distutils/unixccompiler.py b/Lib/distutils/unixccompiler.py deleted file mode 100644 index d00c48981eb6d6..00000000000000 --- a/Lib/distutils/unixccompiler.py +++ /dev/null @@ -1,329 +0,0 @@ -"""distutils.unixccompiler - -Contains the UnixCCompiler class, a subclass of CCompiler that handles -the "typical" Unix-style command-line C compiler: - * macros defined with -Dname[=value] - * macros undefined with -Uname - * include search directories specified with -Idir - * libraries specified with -lllib - * library search directories specified with -Ldir - * compile handled by 'cc' (or similar) executable with -c option: - compiles .c to .o - * link static library handled by 'ar' command (possibly with 'ranlib') - * link shared library handled by 'cc -shared' -""" - -import os, sys, re - -from distutils import sysconfig -from distutils.dep_util import newer -from distutils.ccompiler import \ - CCompiler, gen_preprocess_options, gen_lib_options -from distutils.errors import \ - DistutilsExecError, CompileError, LibError, LinkError -from distutils import log - -if sys.platform == 'darwin': - import _osx_support - -# XXX Things not currently handled: -# * optimization/debug/warning flags; we just use whatever's in Python's -# Makefile and live with it. Is this adequate? If not, we might -# have to have a bunch of subclasses GNUCCompiler, SGICCompiler, -# SunCCompiler, and I suspect down that road lies madness. -# * even if we don't know a warning flag from an optimization flag, -# we need some way for outsiders to feed preprocessor/compiler/linker -# flags in to us -- eg. a sysadmin might want to mandate certain flags -# via a site config file, or a user might want to set something for -# compiling this module distribution only via the setup.py command -# line, whatever. As long as these options come from something on the -# current system, they can be as system-dependent as they like, and we -# should just happily stuff them into the preprocessor/compiler/linker -# options and carry on. - - -class UnixCCompiler(CCompiler): - - compiler_type = 'unix' - - # These are used by CCompiler in two places: the constructor sets - # instance attributes 'preprocessor', 'compiler', etc. from them, and - # 'set_executable()' allows any of these to be set. The defaults here - # are pretty generic; they will probably have to be set by an outsider - # (eg. using information discovered by the sysconfig about building - # Python extensions). 
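For readers who depended on the removed distutils.text_file.TextFile, a hedged illustration of the logical-line behaviour its docstring above describes under the default options (strip unescaped '#' comments, skip blank lines, join backslash-continued lines). The helper name is illustrative only and is not a drop-in replacement:

    import io

    def read_logical_lines(text):
        # Approximates TextFile's defaults: strip unescaped '#' comments,
        # skip blank lines, and join lines that end with a backslash.
        buildup = ''
        for raw in io.StringIO(text):
            pos = raw.find('#')
            if pos != -1 and (pos == 0 or raw[pos - 1] != '\\'):
                raw = raw[:pos]
                if not raw.strip():      # comment-only line: ignore it entirely
                    continue
            raw = raw.replace('\\#', '#').rstrip()
            if raw.endswith('\\'):       # backslash continuation: accumulate
                buildup += raw[:-1]
                continue
            line, buildup = buildup + raw, ''
            if line:                     # skip lines that stripped to nothing
                yield line

    sample = "hello \\\n# a comment to ignore\nthere\n\n\\#not a comment\n"
    print(list(read_logical_lines(sample)))   # ['hello there', '#not a comment']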
- executables = {'preprocessor' : None, - 'compiler' : ["cc"], - 'compiler_so' : ["cc"], - 'compiler_cxx' : ["cc"], - 'linker_so' : ["cc", "-shared"], - 'linker_exe' : ["cc"], - 'archiver' : ["ar", "-cr"], - 'ranlib' : None, - } - - if sys.platform[:6] == "darwin": - executables['ranlib'] = ["ranlib"] - - # Needed for the filename generation methods provided by the base - # class, CCompiler. NB. whoever instantiates/uses a particular - # UnixCCompiler instance should set 'shared_lib_ext' -- we set a - # reasonable common default here, but it's not necessarily used on all - # Unices! - - src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"] - obj_extension = ".o" - static_lib_extension = ".a" - shared_lib_extension = ".so" - dylib_lib_extension = ".dylib" - xcode_stub_lib_extension = ".tbd" - static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s" - xcode_stub_lib_format = dylib_lib_format - if sys.platform == "cygwin": - exe_extension = ".exe" - - def preprocess(self, source, output_file=None, macros=None, - include_dirs=None, extra_preargs=None, extra_postargs=None): - fixed_args = self._fix_compile_args(None, macros, include_dirs) - ignore, macros, include_dirs = fixed_args - pp_opts = gen_preprocess_options(macros, include_dirs) - pp_args = self.preprocessor + pp_opts - if output_file: - pp_args.extend(['-o', output_file]) - if extra_preargs: - pp_args[:0] = extra_preargs - if extra_postargs: - pp_args.extend(extra_postargs) - pp_args.append(source) - - # We need to preprocess: either we're being forced to, or we're - # generating output to stdout, or there's a target output file and - # the source file is newer than the target (or the target doesn't - # exist). - if self.force or output_file is None or newer(source, output_file): - if output_file: - self.mkpath(os.path.dirname(output_file)) - try: - self.spawn(pp_args) - except DistutilsExecError as msg: - raise CompileError(msg) - - def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): - compiler_so = self.compiler_so - if sys.platform == 'darwin': - compiler_so = _osx_support.compiler_fixup(compiler_so, - cc_args + extra_postargs) - try: - self.spawn(compiler_so + cc_args + [src, '-o', obj] + - extra_postargs) - except DistutilsExecError as msg: - raise CompileError(msg) - - def create_static_lib(self, objects, output_libname, - output_dir=None, debug=0, target_lang=None): - objects, output_dir = self._fix_object_args(objects, output_dir) - - output_filename = \ - self.library_filename(output_libname, output_dir=output_dir) - - if self._need_link(objects, output_filename): - self.mkpath(os.path.dirname(output_filename)) - self.spawn(self.archiver + - [output_filename] + - objects + self.objects) - - # Not many Unices required ranlib anymore -- SunOS 4.x is, I - # think the only major Unix that does. Maybe we need some - # platform intelligence here to skip ranlib if it's not - # needed -- or maybe Python's configure script took care of - # it for us, hence the check for leading colon. 
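The UnixCCompiler docstring above spells out the flag conventions it drove: -Dname[=value], -Uname, -Idir, -Ldir, -llib. As a hedged sketch only (the helper below is illustrative and is not distutils' gen_preprocess_options/gen_lib_options), this is how those conventions map onto a command line:

    def unix_cc_flags(macros=(), undefs=(), include_dirs=(),
                      library_dirs=(), libraries=()):
        # Assemble cc-style flags following the conventions documented above.
        flags = []
        for name, *value in macros:               # ('NAME',) or ('NAME', 'val')
            flags.append('-D%s=%s' % (name, value[0]) if value else '-D' + name)
        flags += ['-U' + name for name in undefs]
        flags += ['-I' + d for d in include_dirs]
        flags += ['-L' + d for d in library_dirs]
        flags += ['-l' + lib for lib in libraries]
        return flags

    print(unix_cc_flags(macros=[('NDEBUG',), ('VERSION', '"1.0"')],
                        include_dirs=['/opt/include'],
                        library_dirs=['/opt/lib'], libraries=['ssl']))
    # ['-DNDEBUG', '-DVERSION="1.0"', '-I/opt/include', '-L/opt/lib', '-lssl']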
- if self.ranlib: - try: - self.spawn(self.ranlib + [output_filename]) - except DistutilsExecError as msg: - raise LibError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - def link(self, target_desc, objects, - output_filename, output_dir=None, libraries=None, - library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None): - objects, output_dir = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, - runtime_library_dirs) - libraries, library_dirs, runtime_library_dirs = fixed_args - - lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, - libraries) - if not isinstance(output_dir, (str, type(None))): - raise TypeError("'output_dir' must be a string or None") - if output_dir is not None: - output_filename = os.path.join(output_dir, output_filename) - - if self._need_link(objects, output_filename): - ld_args = (objects + self.objects + - lib_opts + ['-o', output_filename]) - if debug: - ld_args[:0] = ['-g'] - if extra_preargs: - ld_args[:0] = extra_preargs - if extra_postargs: - ld_args.extend(extra_postargs) - self.mkpath(os.path.dirname(output_filename)) - try: - if target_desc == CCompiler.EXECUTABLE: - linker = self.linker_exe[:] - else: - linker = self.linker_so[:] - if target_lang == "c++" and self.compiler_cxx: - # skip over environment variable settings if /usr/bin/env - # is used to set up the linker's environment. - # This is needed on OSX. Note: this assumes that the - # normal and C++ compiler have the same environment - # settings. - i = 0 - if os.path.basename(linker[0]) == "env": - i = 1 - while '=' in linker[i]: - i += 1 - - if os.path.basename(linker[i]) == 'ld_so_aix': - # AIX platforms prefix the compiler with the ld_so_aix - # script, so we need to adjust our linker index - offset = 1 - else: - offset = 0 - - linker[i+offset] = self.compiler_cxx[i] - - if sys.platform == 'darwin': - linker = _osx_support.compiler_fixup(linker, ld_args) - - self.spawn(linker + ld_args) - except DistutilsExecError as msg: - raise LinkError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - # -- Miscellaneous methods ----------------------------------------- - # These are all used by the 'gen_lib_options() function, in - # ccompiler.py. - - def library_dir_option(self, dir): - return "-L" + dir - - def _is_gcc(self, compiler_name): - # clang uses same syntax for rpath as gcc - return any(name in compiler_name for name in ("gcc", "g++", "clang")) - - def runtime_library_dir_option(self, dir): - # XXX Hackish, at the very least. See Python bug #445902: - # http://sourceforge.net/tracker/index.php - # ?func=detail&aid=445902&group_id=5470&atid=105470 - # Linkers on different platforms need different options to - # specify that directories need to be added to the list of - # directories searched for dependencies when a dynamic library - # is sought. GCC on GNU systems (Linux, FreeBSD, ...) has to - # be told to pass the -R option through to the linker, whereas - # other compilers and gcc on other systems just know this. - # Other compilers may need something slightly different. At - # this time, there's no way to determine this information from - # the configuration data stored in the Python installation, so - # we use this hack. 
- compiler = os.path.basename(sysconfig.get_config_var("CC")) - if sys.platform[:6] == "darwin": - # MacOSX's linker doesn't understand the -R flag at all - return "-L" + dir - elif sys.platform[:7] == "freebsd": - return "-Wl,-rpath=" + dir - elif sys.platform[:5] == "hp-ux": - if self._is_gcc(compiler): - return ["-Wl,+s", "-L" + dir] - return ["+s", "-L" + dir] - else: - if self._is_gcc(compiler): - # gcc on non-GNU systems does not need -Wl, but can - # use it anyway. Since distutils has always passed in - # -Wl whenever gcc was used in the past it is probably - # safest to keep doing so. - if sysconfig.get_config_var("GNULD") == "yes": - # GNU ld needs an extra option to get a RUNPATH - # instead of just an RPATH. - return "-Wl,--enable-new-dtags,-R" + dir - else: - return "-Wl,-R" + dir - else: - # No idea how --enable-new-dtags would be passed on to - # ld if this system was using GNU ld. Don't know if a - # system like this even exists. - return "-R" + dir - - def library_option(self, lib): - return "-l" + lib - - def find_library_file(self, dirs, lib, debug=0): - shared_f = self.library_filename(lib, lib_type='shared') - dylib_f = self.library_filename(lib, lib_type='dylib') - xcode_stub_f = self.library_filename(lib, lib_type='xcode_stub') - static_f = self.library_filename(lib, lib_type='static') - - if sys.platform == 'darwin': - # On OSX users can specify an alternate SDK using - # '-isysroot', calculate the SDK root if it is specified - # (and use it further on) - # - # Note that, as of Xcode 7, Apple SDKs may contain textual stub - # libraries with .tbd extensions rather than the normal .dylib - # shared libraries installed in /. The Apple compiler tool - # chain handles this transparently but it can cause problems - # for programs that are being built with an SDK and searching - # for specific libraries. Callers of find_library_file need to - # keep in mind that the base filename of the returned SDK library - # file might have a different extension from that of the library - # file installed on the running system, for example: - # /Applications/Xcode.app/Contents/Developer/Platforms/ - # MacOSX.platform/Developer/SDKs/MacOSX10.11.sdk/ - # usr/lib/libedit.tbd - # vs - # /usr/lib/libedit.dylib - cflags = sysconfig.get_config_var('CFLAGS') - m = re.search(r'-isysroot\s*(\S+)', cflags) - if m is None: - sysroot = _osx_support._default_sysroot(sysconfig.get_config_var('CC')) - else: - sysroot = m.group(1) - - - - for dir in dirs: - shared = os.path.join(dir, shared_f) - dylib = os.path.join(dir, dylib_f) - static = os.path.join(dir, static_f) - xcode_stub = os.path.join(dir, xcode_stub_f) - - if sys.platform == 'darwin' and ( - dir.startswith('/System/') or ( - dir.startswith('/usr/') and not dir.startswith('/usr/local/'))): - - shared = os.path.join(sysroot, dir[1:], shared_f) - dylib = os.path.join(sysroot, dir[1:], dylib_f) - static = os.path.join(sysroot, dir[1:], static_f) - xcode_stub = os.path.join(sysroot, dir[1:], xcode_stub_f) - - # We're second-guessing the linker here, with not much hard - # data to go on: GCC seems to prefer the shared library, so I'm - # assuming that *all* Unix C compilers do. And of course I'm - # ignoring even GCC's "-static" option. So sue me. 
- if os.path.exists(dylib): - return dylib - elif os.path.exists(xcode_stub): - return xcode_stub - elif os.path.exists(shared): - return shared - elif os.path.exists(static): - return static - - # Oops, didn't find it in *any* of 'dirs' - return None diff --git a/Lib/distutils/util.py b/Lib/distutils/util.py deleted file mode 100644 index 2ce5c5b64d62fa..00000000000000 --- a/Lib/distutils/util.py +++ /dev/null @@ -1,562 +0,0 @@ -"""distutils.util - -Miscellaneous utility functions -- anything that doesn't fit into -one of the other *util.py modules. -""" - -import os -import re -import importlib.util -import string -import sys -import distutils -from distutils.errors import DistutilsPlatformError -from distutils.dep_util import newer -from distutils.spawn import spawn -from distutils import log -from distutils.errors import DistutilsByteCompileError - -def get_host_platform(): - """Return a string that identifies the current platform. This is used mainly to - distinguish platform-specific build directories and platform-specific built - distributions. Typically includes the OS name and version and the - architecture (as supplied by 'os.uname()'), although the exact information - included depends on the OS; eg. on Linux, the kernel version isn't - particularly important. - - Examples of returned values: - linux-i586 - linux-alpha (?) - solaris-2.6-sun4u - - Windows will return one of: - win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) - win32 (all others - specifically, sys.platform is returned) - - For other non-POSIX platforms, currently just returns 'sys.platform'. - - """ - if os.name == 'nt': - if 'amd64' in sys.version.lower(): - return 'win-amd64' - if '(arm)' in sys.version.lower(): - return 'win-arm32' - if '(arm64)' in sys.version.lower(): - return 'win-arm64' - return sys.platform - - # Set for cross builds explicitly - if "_PYTHON_HOST_PLATFORM" in os.environ: - return os.environ["_PYTHON_HOST_PLATFORM"] - - if os.name != "posix" or not hasattr(os, 'uname'): - # XXX what about the architecture? NT is Intel or Alpha, - # Mac OS is M68k or PPC, etc. - return sys.platform - - # Try to distinguish various flavours of Unix - - (osname, host, release, version, machine) = os.uname() - - # Convert the OS name to lowercase, remove '/' characters, and translate - # spaces (for "Power Macintosh") - osname = osname.lower().replace('/', '') - machine = machine.replace(' ', '_') - machine = machine.replace('/', '-') - - if osname[:5] == "linux": - # At least on Linux/Intel, 'machine' is the processor -- - # i386, etc. - # XXX what about Alpha, SPARC, etc? - return "%s-%s" % (osname, machine) - elif osname[:5] == "sunos": - if release[0] >= "5": # SunOS 5 == Solaris 2 - osname = "solaris" - release = "%d.%s" % (int(release[0]) - 3, release[2:]) - # We can't use "platform.architecture()[0]" because a - # bootstrap problem. We use a dict to get an error - # if some suspicious happens. 
- bitness = {2147483647:"32bit", 9223372036854775807:"64bit"} - machine += ".%s" % bitness[sys.maxsize] - # fall through to standard osname-release-machine representation - elif osname[:3] == "aix": - from _aix_support import aix_platform - return aix_platform() - elif osname[:6] == "cygwin": - osname = "cygwin" - rel_re = re.compile (r'[\d.]+', re.ASCII) - m = rel_re.match(release) - if m: - release = m.group() - elif osname[:6] == "darwin": - import _osx_support, distutils.sysconfig - osname, release, machine = _osx_support.get_platform_osx( - distutils.sysconfig.get_config_vars(), - osname, release, machine) - - return "%s-%s-%s" % (osname, release, machine) - -def get_platform(): - if os.name == 'nt': - TARGET_TO_PLAT = { - 'x86' : 'win32', - 'x64' : 'win-amd64', - 'arm' : 'win-arm32', - } - return TARGET_TO_PLAT.get(os.environ.get('VSCMD_ARG_TGT_ARCH')) or get_host_platform() - else: - return get_host_platform() - -def convert_path (pathname): - """Return 'pathname' as a name that will work on the native filesystem, - i.e. split it on '/' and put it back together again using the current - directory separator. Needed because filenames in the setup script are - always supplied in Unix style, and have to be converted to the local - convention before we can actually use them in the filesystem. Raises - ValueError on non-Unix-ish systems if 'pathname' either starts or - ends with a slash. - """ - if os.sep == '/': - return pathname - if not pathname: - return pathname - if pathname[0] == '/': - raise ValueError("path '%s' cannot be absolute" % pathname) - if pathname[-1] == '/': - raise ValueError("path '%s' cannot end with '/'" % pathname) - - paths = pathname.split('/') - while '.' in paths: - paths.remove('.') - if not paths: - return os.curdir - return os.path.join(*paths) - -# convert_path () - - -def change_root (new_root, pathname): - """Return 'pathname' with 'new_root' prepended. If 'pathname' is - relative, this is equivalent to "os.path.join(new_root,pathname)". - Otherwise, it requires making 'pathname' relative and then joining the - two, which is tricky on DOS/Windows and Mac OS. - """ - if os.name == 'posix': - if not os.path.isabs(pathname): - return os.path.join(new_root, pathname) - else: - return os.path.join(new_root, pathname[1:]) - - elif os.name == 'nt': - (drive, path) = os.path.splitdrive(pathname) - if path[0] == '\\': - path = path[1:] - return os.path.join(new_root, path) - - else: - raise DistutilsPlatformError("nothing known about platform '%s'" % os.name) - - -_environ_checked = 0 -def check_environ (): - """Ensure that 'os.environ' has all the environment variables we - guarantee that users can use in config files, command-line options, - etc. Currently this includes: - HOME - user's home directory (Unix only) - PLAT - description of the current platform, including hardware - and OS (see 'get_platform()') - """ - global _environ_checked - if _environ_checked: - return - - if os.name == 'posix' and 'HOME' not in os.environ: - try: - import pwd - os.environ['HOME'] = pwd.getpwuid(os.getuid())[5] - except (ImportError, KeyError): - # bpo-10496: if the current user identifier doesn't exist in the - # password database, do nothing - pass - - if 'PLAT' not in os.environ: - os.environ['PLAT'] = get_platform() - - _environ_checked = 1 - - -def subst_vars (s, local_vars): - """Perform shell/Perl-style variable substitution on 'string'. 
Every - occurrence of '$' followed by a name is considered a variable, and - variable is substituted by the value found in the 'local_vars' - dictionary, or in 'os.environ' if it's not in 'local_vars'. - 'os.environ' is first checked/augmented to guarantee that it contains - certain values: see 'check_environ()'. Raise ValueError for any - variables not found in either 'local_vars' or 'os.environ'. - """ - check_environ() - def _subst (match, local_vars=local_vars): - var_name = match.group(1) - if var_name in local_vars: - return str(local_vars[var_name]) - else: - return os.environ[var_name] - - try: - return re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _subst, s) - except KeyError as var: - raise ValueError("invalid variable '$%s'" % var) - -# subst_vars () - - -def grok_environment_error (exc, prefix="error: "): - # Function kept for backward compatibility. - # Used to try clever things with EnvironmentErrors, - # but nowadays str(exception) produces good messages. - return prefix + str(exc) - - -# Needed by 'split_quoted()' -_wordchars_re = _squote_re = _dquote_re = None -def _init_regex(): - global _wordchars_re, _squote_re, _dquote_re - _wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace) - _squote_re = re.compile(r"'(?:[^'\\]|\\.)*'") - _dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"') - -def split_quoted (s): - """Split a string up according to Unix shell-like rules for quotes and - backslashes. In short: words are delimited by spaces, as long as those - spaces are not escaped by a backslash, or inside a quoted string. - Single and double quotes are equivalent, and the quote characters can - be backslash-escaped. The backslash is stripped from any two-character - escape sequence, leaving only the escaped character. The quote - characters are stripped from any quoted string. Returns a list of - words. - """ - - # This is a nice algorithm for splitting up a single string, since it - # doesn't require character-by-character examination. It was a little - # bit of a brain-bender to get it working right, though... - if _wordchars_re is None: _init_regex() - - s = s.strip() - words = [] - pos = 0 - - while s: - m = _wordchars_re.match(s, pos) - end = m.end() - if end == len(s): - words.append(s[:end]) - break - - if s[end] in string.whitespace: # unescaped, unquoted whitespace: now - words.append(s[:end]) # we definitely have a word delimiter - s = s[end:].lstrip() - pos = 0 - - elif s[end] == '\\': # preserve whatever is being escaped; - # will become part of the current word - s = s[:end] + s[end+1:] - pos = end+1 - - else: - if s[end] == "'": # slurp singly-quoted string - m = _squote_re.match(s, end) - elif s[end] == '"': # slurp doubly-quoted string - m = _dquote_re.match(s, end) - else: - raise RuntimeError("this can't happen (bad char '%c')" % s[end]) - - if m is None: - raise ValueError("bad string (mismatched %s quotes?)" % s[end]) - - (beg, end) = m.span() - s = s[:beg] + s[beg+1:end-1] + s[end:] - pos = m.end() - 2 - - if pos >= len(s): - words.append(s) - break - - return words - -# split_quoted () - - -def execute (func, args, msg=None, verbose=0, dry_run=0): - """Perform some action that affects the outside world (eg. by - writing to the filesystem). Such actions are special because they - are disabled by the 'dry_run' flag. This method takes care of all - that bureaucracy for you; all you have to do is supply the - function to call and an argument tuple for it (to embody the - "external action" being performed), and an optional message to - print. 
- """ - if msg is None: - msg = "%s%r" % (func.__name__, args) - if msg[-2:] == ',)': # correct for singleton tuple - msg = msg[0:-2] + ')' - - log.info(msg) - if not dry_run: - func(*args) - - -def strtobool (val): - """Convert a string representation of truth to true (1) or false (0). - - True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values - are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if - 'val' is anything else. - """ - val = val.lower() - if val in ('y', 'yes', 't', 'true', 'on', '1'): - return 1 - elif val in ('n', 'no', 'f', 'false', 'off', '0'): - return 0 - else: - raise ValueError("invalid truth value %r" % (val,)) - - -def byte_compile (py_files, - optimize=0, force=0, - prefix=None, base_dir=None, - verbose=1, dry_run=0, - direct=None): - """Byte-compile a collection of Python source files to .pyc - files in a __pycache__ subdirectory. 'py_files' is a list - of files to compile; any files that don't end in ".py" are silently - skipped. 'optimize' must be one of the following: - 0 - don't optimize - 1 - normal optimization (like "python -O") - 2 - extra optimization (like "python -OO") - If 'force' is true, all files are recompiled regardless of - timestamps. - - The source filename encoded in each bytecode file defaults to the - filenames listed in 'py_files'; you can modify these with 'prefix' and - 'basedir'. 'prefix' is a string that will be stripped off of each - source filename, and 'base_dir' is a directory name that will be - prepended (after 'prefix' is stripped). You can supply either or both - (or neither) of 'prefix' and 'base_dir', as you wish. - - If 'dry_run' is true, doesn't actually do anything that would - affect the filesystem. - - Byte-compilation is either done directly in this interpreter process - with the standard py_compile module, or indirectly by writing a - temporary script and executing it. Normally, you should let - 'byte_compile()' figure out to use direct compilation or not (see - the source for details). The 'direct' flag is used by the script - generated in indirect mode; unless you know what you're doing, leave - it set to None. - """ - - # Late import to fix a bootstrap issue: _posixsubprocess is built by - # setup.py, but setup.py uses distutils. - import subprocess - - # nothing is done if sys.dont_write_bytecode is True - if sys.dont_write_bytecode: - raise DistutilsByteCompileError('byte-compiling is disabled.') - - # First, if the caller didn't force us into direct or indirect mode, - # figure out which mode we should be in. We take a conservative - # approach: choose direct mode *only* if the current interpreter is - # in debug mode and optimize is 0. If we're not in debug mode (-O - # or -OO), we don't know which level of optimization this - # interpreter is running with, so we can't do direct - # byte-compilation and be certain that it's the right thing. Thus, - # always compile indirectly if the current interpreter is in either - # optimize mode, or if either optimization level was requested by - # the caller. - if direct is None: - direct = (__debug__ and optimize == 0) - - # "Indirect" byte-compilation: write a temporary script and then - # run it with the appropriate flags. 
- if not direct: - try: - from tempfile import mkstemp - (script_fd, script_name) = mkstemp(".py") - except ImportError: - from tempfile import mktemp - (script_fd, script_name) = None, mktemp(".py") - log.info("writing byte-compilation script '%s'", script_name) - if not dry_run: - if script_fd is not None: - script = os.fdopen(script_fd, "w") - else: - script = open(script_name, "w") - - with script: - script.write("""\ -from distutils.util import byte_compile -files = [ -""") - - # XXX would be nice to write absolute filenames, just for - # safety's sake (script should be more robust in the face of - # chdir'ing before running it). But this requires abspath'ing - # 'prefix' as well, and that breaks the hack in build_lib's - # 'byte_compile()' method that carefully tacks on a trailing - # slash (os.sep really) to make sure the prefix here is "just - # right". This whole prefix business is rather delicate -- the - # problem is that it's really a directory, but I'm treating it - # as a dumb string, so trailing slashes and so forth matter. - - #py_files = map(os.path.abspath, py_files) - #if prefix: - # prefix = os.path.abspath(prefix) - - script.write(",\n".join(map(repr, py_files)) + "]\n") - script.write(""" -byte_compile(files, optimize=%r, force=%r, - prefix=%r, base_dir=%r, - verbose=%r, dry_run=0, - direct=1) -""" % (optimize, force, prefix, base_dir, verbose)) - - msg = distutils._DEPRECATION_MESSAGE - cmd = [sys.executable] - cmd.extend(subprocess._optim_args_from_interpreter_flags()) - cmd.append(f'-Wignore:{msg}:DeprecationWarning') - cmd.append(script_name) - spawn(cmd, dry_run=dry_run) - execute(os.remove, (script_name,), "removing %s" % script_name, - dry_run=dry_run) - - # "Direct" byte-compilation: use the py_compile module to compile - # right here, right now. Note that the script generated in indirect - # mode simply calls 'byte_compile()' in direct mode, a weird sort of - # cross-process recursion. Hey, it works! - else: - from py_compile import compile - - for file in py_files: - if file[-3:] != ".py": - # This lets us be lazy and not filter filenames in - # the "install_lib" command. - continue - - # Terminology from the py_compile module: - # cfile - byte-compiled file - # dfile - purported source filename (same as 'file' by default) - if optimize >= 0: - opt = '' if optimize == 0 else optimize - cfile = importlib.util.cache_from_source( - file, optimization=opt) - else: - cfile = importlib.util.cache_from_source(file) - dfile = file - if prefix: - if file[:len(prefix)] != prefix: - raise ValueError("invalid prefix: filename %r doesn't start with %r" - % (file, prefix)) - dfile = dfile[len(prefix):] - if base_dir: - dfile = os.path.join(base_dir, dfile) - - cfile_base = os.path.basename(cfile) - if direct: - if force or newer(file, cfile): - log.info("byte-compiling %s to %s", file, cfile_base) - if not dry_run: - compile(file, cfile, dfile) - else: - log.debug("skipping byte-compilation of %s to %s", - file, cfile_base) - -# byte_compile () - -def rfc822_escape (header): - """Return a version of the string escaped for inclusion in an - RFC-822 header, by ensuring there are 8 spaces space after each newline. - """ - lines = header.split('\n') - sep = '\n' + 8 * ' ' - return sep.join(lines) - -# 2to3 support - -def run_2to3(files, fixer_names=None, options=None, explicit=None): - """Invoke 2to3 on a list of Python files. - The files should all come from the build area, as the - modification is done in-place. 
To reduce the build time, - only files modified since the last invocation of this - function should be passed in the files argument.""" - - if not files: - return - - # Make this class local, to delay import of 2to3 - from lib2to3.refactor import RefactoringTool, get_fixers_from_package - class DistutilsRefactoringTool(RefactoringTool): - def log_error(self, msg, *args, **kw): - log.error(msg, *args) - - def log_message(self, msg, *args): - log.info(msg, *args) - - def log_debug(self, msg, *args): - log.debug(msg, *args) - - if fixer_names is None: - fixer_names = get_fixers_from_package('lib2to3.fixes') - r = DistutilsRefactoringTool(fixer_names, options=options) - r.refactor(files, write=True) - -def copydir_run_2to3(src, dest, template=None, fixer_names=None, - options=None, explicit=None): - """Recursively copy a directory, only copying new and changed files, - running run_2to3 over all newly copied Python modules afterward. - - If you give a template string, it's parsed like a MANIFEST.in. - """ - from distutils.dir_util import mkpath - from distutils.file_util import copy_file - from distutils.filelist import FileList - filelist = FileList() - curdir = os.getcwd() - os.chdir(src) - try: - filelist.findall() - finally: - os.chdir(curdir) - filelist.files[:] = filelist.allfiles - if template: - for line in template.splitlines(): - line = line.strip() - if not line: continue - filelist.process_template_line(line) - copied = [] - for filename in filelist.files: - outname = os.path.join(dest, filename) - mkpath(os.path.dirname(outname)) - res = copy_file(os.path.join(src, filename), outname, update=1) - if res[1]: copied.append(outname) - run_2to3([fn for fn in copied if fn.lower().endswith('.py')], - fixer_names=fixer_names, options=options, explicit=explicit) - return copied - -class Mixin2to3: - '''Mixin class for commands that run 2to3. - To configure 2to3, setup scripts may either change - the class variables, or inherit from individual commands - to override how 2to3 is invoked.''' - - # provide list of fixers to run; - # defaults to all from lib2to3.fixers - fixer_names = None - - # options dictionary - options = None - - # list of fixers to invoke even though they are marked as explicit - explicit = None - - def run_2to3(self, files): - return run_2to3(files, self.fixer_names, self.options, self.explicit) diff --git a/Lib/distutils/version.py b/Lib/distutils/version.py deleted file mode 100644 index c33bebaed26aee..00000000000000 --- a/Lib/distutils/version.py +++ /dev/null @@ -1,347 +0,0 @@ -# -# distutils/version.py -# -# Implements multiple version numbering conventions for the -# Python Module Distribution Utilities. -# -# $Id$ -# - -"""Provides classes to represent module version numbers (one class for -each style of version numbering). There are currently two such classes -implemented: StrictVersion and LooseVersion. - -Every version number class implements the following interface: - * the 'parse' method takes a string and parses it to some internal - representation; if the string is an invalid version number, - 'parse' raises a ValueError exception - * the class constructor takes an optional string argument which, - if supplied, is passed to 'parse' - * __str__ reconstructs the string that was passed to 'parse' (or - an equivalent string -- ie. 
one that will generate an equivalent - version number instance) - * __repr__ generates Python code to recreate the version number instance - * _cmp compares the current instance with either another instance - of the same class or a string (which will be parsed to an instance - of the same class, thus must follow the same rules) -""" - -import re - -class Version: - """Abstract base class for version numbering classes. Just provides - constructor (__init__) and reproducer (__repr__), because those - seem to be the same for all version numbering classes; and route - rich comparisons to _cmp. - """ - - def __init__ (self, vstring=None): - if vstring: - self.parse(vstring) - - def __repr__ (self): - return "%s ('%s')" % (self.__class__.__name__, str(self)) - - def __eq__(self, other): - c = self._cmp(other) - if c is NotImplemented: - return c - return c == 0 - - def __lt__(self, other): - c = self._cmp(other) - if c is NotImplemented: - return c - return c < 0 - - def __le__(self, other): - c = self._cmp(other) - if c is NotImplemented: - return c - return c <= 0 - - def __gt__(self, other): - c = self._cmp(other) - if c is NotImplemented: - return c - return c > 0 - - def __ge__(self, other): - c = self._cmp(other) - if c is NotImplemented: - return c - return c >= 0 - - -# Interface for version-number classes -- must be implemented -# by the following classes (the concrete ones -- Version should -# be treated as an abstract class). -# __init__ (string) - create and take same action as 'parse' -# (string parameter is optional) -# parse (string) - convert a string representation to whatever -# internal representation is appropriate for -# this style of version numbering -# __str__ (self) - convert back to a string; should be very similar -# (if not identical to) the string supplied to parse -# __repr__ (self) - generate Python code to recreate -# the instance -# _cmp (self, other) - compare two version numbers ('other' may -# be an unparsed version string, or another -# instance of your version class) - - -class StrictVersion (Version): - - """Version numbering for anal retentives and software idealists. - Implements the standard interface for version number classes as - described above. A version number consists of two or three - dot-separated numeric components, with an optional "pre-release" tag - on the end. The pre-release tag consists of the letter 'a' or 'b' - followed by a number. If the numeric components of two version - numbers are equal, then one with a pre-release tag will always - be deemed earlier (lesser) than one without. - - The following are valid version numbers (shown in the order that - would be obtained by sorting according to the supplied cmp function): - - 0.4 0.4.0 (these two are equivalent) - 0.4.1 - 0.5a1 - 0.5b3 - 0.5 - 0.9.6 - 1.0 - 1.0.4a3 - 1.0.4b1 - 1.0.4 - - The following are examples of invalid version numbers: - - 1 - 2.7.2.2 - 1.3.a4 - 1.3pl1 - 1.3c4 - - The rationale for this version numbering system will be explained - in the distutils documentation. - """ - - version_re = re.compile(r'^(\d+) \. (\d+) (\. (\d+))? 
([ab](\d+))?$', - re.VERBOSE | re.ASCII) - - - def parse (self, vstring): - match = self.version_re.match(vstring) - if not match: - raise ValueError("invalid version number '%s'" % vstring) - - (major, minor, patch, prerelease, prerelease_num) = \ - match.group(1, 2, 4, 5, 6) - - if patch: - self.version = tuple(map(int, [major, minor, patch])) - else: - self.version = tuple(map(int, [major, minor])) + (0,) - - if prerelease: - self.prerelease = (prerelease[0], int(prerelease_num)) - else: - self.prerelease = None - - - def __str__ (self): - - if self.version[2] == 0: - vstring = '.'.join(map(str, self.version[0:2])) - else: - vstring = '.'.join(map(str, self.version)) - - if self.prerelease: - vstring = vstring + self.prerelease[0] + str(self.prerelease[1]) - - return vstring - - - def _cmp (self, other): - if isinstance(other, str): - other = StrictVersion(other) - elif not isinstance(other, StrictVersion): - return NotImplemented - - if self.version != other.version: - # numeric versions don't match - # prerelease stuff doesn't matter - if self.version < other.version: - return -1 - else: - return 1 - - # have to compare prerelease - # case 1: neither has prerelease; they're equal - # case 2: self has prerelease, other doesn't; other is greater - # case 3: self doesn't have prerelease, other does: self is greater - # case 4: both have prerelease: must compare them! - - if (not self.prerelease and not other.prerelease): - return 0 - elif (self.prerelease and not other.prerelease): - return -1 - elif (not self.prerelease and other.prerelease): - return 1 - elif (self.prerelease and other.prerelease): - if self.prerelease == other.prerelease: - return 0 - elif self.prerelease < other.prerelease: - return -1 - else: - return 1 - else: - assert False, "never get here" - -# end class StrictVersion - - -# The rules according to Greg Stein: -# 1) a version number has 1 or more numbers separated by a period or by -# sequences of letters. If only periods, then these are compared -# left-to-right to determine an ordering. -# 2) sequences of letters are part of the tuple for comparison and are -# compared lexicographically -# 3) recognize the numeric components may have leading zeroes -# -# The LooseVersion class below implements these rules: a version number -# string is split up into a tuple of integer and string components, and -# comparison is a simple tuple comparison. This means that version -# numbers behave in a predictable and obvious way, but a way that might -# not necessarily be how people *want* version numbers to behave. There -# wouldn't be a problem if people could stick to purely numeric version -# numbers: just split on period and compare the numbers as tuples. -# However, people insist on putting letters into their version numbers; -# the most common purpose seems to be: -# - indicating a "pre-release" version -# ('alpha', 'beta', 'a', 'b', 'pre', 'p') -# - indicating a post-release patch ('p', 'pl', 'patch') -# but of course this can't cover all version number schemes, and there's -# no way to know what a programmer means without asking him. -# -# The problem is what to do with letters (and other non-numeric -# characters) in a version number. The current implementation does the -# obvious and predictable thing: keep them as strings and compare -# lexically within a tuple comparison. This has the desired effect if -# an appended letter sequence implies something "post-release": -# eg. "0.99" < "0.99pl14" < "1.0", and "5.001" < "5.001m" < "5.002". 
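The comments above describe LooseVersion's rule: split the version string into numeric and alphabetic runs and compare the resulting sequences. A small standalone sketch of just that key function, assuming the splitting rule described above (it is not a replacement for the class that follows):

    import re

    _component_re = re.compile(r'(\d+|[a-z]+|\.)')

    def loose_key(vstring):
        # Numeric runs become ints, letter runs stay strings, dots are dropped.
        parts = [p for p in _component_re.split(vstring) if p and p != '.']
        return [int(p) if p.isdigit() else p for p in parts]

    print(loose_key('1.5.2b2'))                                           # [1, 5, 2, 'b', 2]
    print(loose_key('0.99') < loose_key('0.99pl14') < loose_key('1.0'))   # True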
-# -# However, if letters in a version number imply a pre-release version, -# the "obvious" thing isn't correct. Eg. you would expect that -# "1.5.1" < "1.5.2a2" < "1.5.2", but under the tuple/lexical comparison -# implemented here, this just isn't so. -# -# Two possible solutions come to mind. The first is to tie the -# comparison algorithm to a particular set of semantic rules, as has -# been done in the StrictVersion class above. This works great as long -# as everyone can go along with bondage and discipline. Hopefully a -# (large) subset of Python module programmers will agree that the -# particular flavour of bondage and discipline provided by StrictVersion -# provides enough benefit to be worth using, and will submit their -# version numbering scheme to its domination. The free-thinking -# anarchists in the lot will never give in, though, and something needs -# to be done to accommodate them. -# -# Perhaps a "moderately strict" version class could be implemented that -# lets almost anything slide (syntactically), and makes some heuristic -# assumptions about non-digits in version number strings. This could -# sink into special-case-hell, though; if I was as talented and -# idiosyncratic as Larry Wall, I'd go ahead and implement a class that -# somehow knows that "1.2.1" < "1.2.2a2" < "1.2.2" < "1.2.2pl3", and is -# just as happy dealing with things like "2g6" and "1.13++". I don't -# think I'm smart enough to do it right though. -# -# In any case, I've coded the test suite for this module (see -# ../test/test_version.py) specifically to fail on things like comparing -# "1.2a2" and "1.2". That's not because the *code* is doing anything -# wrong, it's because the simple, obvious design doesn't match my -# complicated, hairy expectations for real-world version numbers. It -# would be a snap to fix the test suite to say, "Yep, LooseVersion does -# the Right Thing" (ie. the code matches the conception). But I'd rather -# have a conception that matches common notions about version numbers. - -class LooseVersion (Version): - - """Version numbering for anarchists and software realists. - Implements the standard interface for version number classes as - described above. A version number consists of a series of numbers, - separated by either periods or strings of letters. When comparing - version numbers, the numeric components will be compared - numerically, and the alphabetic components lexically. The following - are all valid version numbers, in no particular order: - - 1.5.1 - 1.5.2b2 - 161 - 3.10a - 8.02 - 3.4j - 1996.07.12 - 3.2.pl0 - 3.1.1.6 - 2g6 - 11g - 0.960923 - 2.2beta29 - 1.13++ - 5.5.kw - 2.0b1pl0 - - In fact, there is no such thing as an invalid version number under - this scheme; the rules for comparison are simple and predictable, - but may not always give the results you want (for some definition - of "want"). 
- """ - - component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE) - - def __init__ (self, vstring=None): - if vstring: - self.parse(vstring) - - - def parse (self, vstring): - # I've given up on thinking I can reconstruct the version string - # from the parsed tuple -- so I just store the string here for - # use by __str__ - self.vstring = vstring - components = [x for x in self.component_re.split(vstring) - if x and x != '.'] - for i, obj in enumerate(components): - try: - components[i] = int(obj) - except ValueError: - pass - - self.version = components - - - def __str__ (self): - return self.vstring - - - def __repr__ (self): - return "LooseVersion ('%s')" % str(self) - - - def _cmp (self, other): - if isinstance(other, str): - other = LooseVersion(other) - elif not isinstance(other, LooseVersion): - return NotImplemented - - if self.version == other.version: - return 0 - if self.version < other.version: - return -1 - if self.version > other.version: - return 1 - - -# end class LooseVersion diff --git a/Lib/distutils/versionpredicate.py b/Lib/distutils/versionpredicate.py deleted file mode 100644 index 062c98f2489951..00000000000000 --- a/Lib/distutils/versionpredicate.py +++ /dev/null @@ -1,166 +0,0 @@ -"""Module for parsing and testing package version predicate strings. -""" -import re -import distutils.version -import operator - - -re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)", - re.ASCII) -# (package) (rest) - -re_paren = re.compile(r"^\s*\((.*)\)\s*$") # (list) inside of parentheses -re_splitComparison = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$") -# (comp) (version) - - -def splitUp(pred): - """Parse a single version comparison. - - Return (comparison string, StrictVersion) - """ - res = re_splitComparison.match(pred) - if not res: - raise ValueError("bad package restriction syntax: %r" % pred) - comp, verStr = res.groups() - return (comp, distutils.version.StrictVersion(verStr)) - -compmap = {"<": operator.lt, "<=": operator.le, "==": operator.eq, - ">": operator.gt, ">=": operator.ge, "!=": operator.ne} - -class VersionPredicate: - """Parse and test package version predicates. - - >>> v = VersionPredicate('pyepat.abc (>1.0, <3333.3a1, !=1555.1b3)') - - The `name` attribute provides the full dotted name that is given:: - - >>> v.name - 'pyepat.abc' - - The str() of a `VersionPredicate` provides a normalized - human-readable version of the expression:: - - >>> print(v) - pyepat.abc (> 1.0, < 3333.3a1, != 1555.1b3) - - The `satisfied_by()` method can be used to determine with a given - version number is included in the set described by the version - restrictions:: - - >>> v.satisfied_by('1.1') - True - >>> v.satisfied_by('1.4') - True - >>> v.satisfied_by('1.0') - False - >>> v.satisfied_by('4444.4') - False - >>> v.satisfied_by('1555.1b3') - False - - `VersionPredicate` is flexible in accepting extra whitespace:: - - >>> v = VersionPredicate(' pat( == 0.1 ) ') - >>> v.name - 'pat' - >>> v.satisfied_by('0.1') - True - >>> v.satisfied_by('0.2') - False - - If any version numbers passed in do not conform to the - restrictions of `StrictVersion`, a `ValueError` is raised:: - - >>> v = VersionPredicate('p1.p2.p3.p4(>=1.0, <=1.3a1, !=1.2zb3)') - Traceback (most recent call last): - ... 
- ValueError: invalid version number '1.2zb3' - - It the module or package name given does not conform to what's - allowed as a legal module or package name, `ValueError` is - raised:: - - >>> v = VersionPredicate('foo-bar') - Traceback (most recent call last): - ... - ValueError: expected parenthesized list: '-bar' - - >>> v = VersionPredicate('foo bar (12.21)') - Traceback (most recent call last): - ... - ValueError: expected parenthesized list: 'bar (12.21)' - - """ - - def __init__(self, versionPredicateStr): - """Parse a version predicate string. - """ - # Fields: - # name: package name - # pred: list of (comparison string, StrictVersion) - - versionPredicateStr = versionPredicateStr.strip() - if not versionPredicateStr: - raise ValueError("empty package restriction") - match = re_validPackage.match(versionPredicateStr) - if not match: - raise ValueError("bad package name in %r" % versionPredicateStr) - self.name, paren = match.groups() - paren = paren.strip() - if paren: - match = re_paren.match(paren) - if not match: - raise ValueError("expected parenthesized list: %r" % paren) - str = match.groups()[0] - self.pred = [splitUp(aPred) for aPred in str.split(",")] - if not self.pred: - raise ValueError("empty parenthesized list in %r" - % versionPredicateStr) - else: - self.pred = [] - - def __str__(self): - if self.pred: - seq = [cond + " " + str(ver) for cond, ver in self.pred] - return self.name + " (" + ", ".join(seq) + ")" - else: - return self.name - - def satisfied_by(self, version): - """True if version is compatible with all the predicates in self. - The parameter version must be acceptable to the StrictVersion - constructor. It may be either a string or StrictVersion. - """ - for cond, ver in self.pred: - if not compmap[cond](version, ver): - return False - return True - - -_provision_rx = None - -def split_provision(value): - """Return the name and optional version number of a provision. - - The version number, if given, will be returned as a `StrictVersion` - instance, otherwise it will be `None`. - - >>> split_provision('mypkg') - ('mypkg', None) - >>> split_provision(' mypkg( 1.2 ) ') - ('mypkg', StrictVersion ('1.2')) - """ - global _provision_rx - if _provision_rx is None: - _provision_rx = re.compile( - r"([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$", - re.ASCII) - value = value.strip() - m = _provision_rx.match(value) - if not m: - raise ValueError("illegal provides specification: %r" % value) - ver = m.group(2) or None - if ver: - ver = distutils.version.StrictVersion(ver) - return m.group(1), ver diff --git a/Lib/email/generator.py b/Lib/email/generator.py index c9b121624e08d5..885e6ba98540a7 100644 --- a/Lib/email/generator.py +++ b/Lib/email/generator.py @@ -170,7 +170,7 @@ def _write(self, msg): # parameter. # # The way we do this, so as to make the _handle_*() methods simpler, - # is to cache any subpart writes into a buffer. The we write the + # is to cache any subpart writes into a buffer. Then we write the # headers and the buffer contents. That way, subpart handlers can # Do The Right Thing, and can still modify the Content-Type: header if # necessary. 
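The LooseVersion class removed above splits a version string into numeric and alphabetic components and compares them element-wise. For code that needs similar behaviour after the distutils removal, the following is a minimal sketch of that component-splitting logic (loose_version_key is a hypothetical helper, not a drop-in replacement; the third-party packaging project's Version class is the commonly recommended substitute):

    import re

    _component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)

    def loose_version_key(vstring):
        # Mirror LooseVersion.parse(): keep numeric pieces as ints so they
        # compare numerically, and leave alphabetic pieces as strings.
        key = []
        for piece in _component_re.split(vstring):
            if not piece or piece == '.':
                continue
            try:
                key.append(int(piece))
            except ValueError:
                key.append(piece)
        return key

    # Purely numeric schemes sort as expected:
    assert loose_version_key('1.5.1') < loose_version_key('1.5.2')
    # Mixed int/str components (e.g. '1.5.2b2' vs '1.5.1.6') can raise
    # TypeError when compared in Python 3, so real code usually needs a
    # stricter scheme than this sketch provides.
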
diff --git a/Lib/email/parser.py b/Lib/email/parser.py index 7db4da1ff081c1..e94d455baa5262 100644 --- a/Lib/email/parser.py +++ b/Lib/email/parser.py @@ -49,10 +49,7 @@ def parse(self, fp, headersonly=False): feedparser = FeedParser(self._class, policy=self.policy) if headersonly: feedparser._set_headersonly() - while True: - data = fp.read(8192) - if not data: - break + while data := fp.read(8192): feedparser.feed(data) return feedparser.close() diff --git a/Lib/encodings/idna.py b/Lib/encodings/idna.py index ea4058512fe366..5396047a7fb0b8 100644 --- a/Lib/encodings/idna.py +++ b/Lib/encodings/idna.py @@ -39,23 +39,21 @@ def nameprep(label): # Check bidi RandAL = [stringprep.in_table_d1(x) for x in label] - for c in RandAL: - if c: - # There is a RandAL char in the string. Must perform further - # tests: - # 1) The characters in section 5.8 MUST be prohibited. - # This is table C.8, which was already checked - # 2) If a string contains any RandALCat character, the string - # MUST NOT contain any LCat character. - if any(stringprep.in_table_d2(x) for x in label): - raise UnicodeError("Violation of BIDI requirement 2") - - # 3) If a string contains any RandALCat character, a - # RandALCat character MUST be the first character of the - # string, and a RandALCat character MUST be the last - # character of the string. - if not RandAL[0] or not RandAL[-1]: - raise UnicodeError("Violation of BIDI requirement 3") + if any(RandAL): + # There is a RandAL char in the string. Must perform further + # tests: + # 1) The characters in section 5.8 MUST be prohibited. + # This is table C.8, which was already checked + # 2) If a string contains any RandALCat character, the string + # MUST NOT contain any LCat character. + if any(stringprep.in_table_d2(x) for x in label): + raise UnicodeError("Violation of BIDI requirement 2") + # 3) If a string contains any RandALCat character, a + # RandALCat character MUST be the first character of the + # string, and a RandALCat character MUST be the last + # character of the string. + if not RandAL[0] or not RandAL[-1]: + raise UnicodeError("Violation of BIDI requirement 3") return label @@ -103,6 +101,16 @@ def ToASCII(label): raise UnicodeError("label empty or too long") def ToUnicode(label): + if len(label) > 1024: + # Protection from https://github.com/python/cpython/issues/98433. + # https://datatracker.ietf.org/doc/html/rfc5894#section-6 + # doesn't specify a label size limit prior to NAMEPREP. But having + # one makes practical sense. + # This leaves ample room for nameprep() to remove Nothing characters + # per https://www.rfc-editor.org/rfc/rfc3454#section-3.1 while still + # preventing us from wasting time decoding a big thing that'll just + # hit the actual <= 63 length limit in Step 6. 
+ raise UnicodeError("label way too long") # Step 1: Check for ASCII if isinstance(label, bytes): pure_ascii = True diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py index 4a6ba9cedf3d1c..1a2f57c07ba341 100644 --- a/Lib/ensurepip/__init__.py +++ b/Lib/ensurepip/__init__.py @@ -11,7 +11,7 @@ __all__ = ["version", "bootstrap"] _PACKAGE_NAMES = ('setuptools', 'pip') _SETUPTOOLS_VERSION = "65.5.0" -_PIP_VERSION = "22.3" +_PIP_VERSION = "22.3.1" _PROJECTS = [ ("setuptools", _SETUPTOOLS_VERSION, "py3"), ("pip", _PIP_VERSION, "py3"), diff --git a/Lib/ensurepip/_bundled/pip-22.3-py3-none-any.whl b/Lib/ensurepip/_bundled/pip-22.3.1-py3-none-any.whl similarity index 94% rename from Lib/ensurepip/_bundled/pip-22.3-py3-none-any.whl rename to Lib/ensurepip/_bundled/pip-22.3.1-py3-none-any.whl index d6fccd9de92afc..c5b7753e757df2 100644 Binary files a/Lib/ensurepip/_bundled/pip-22.3-py3-none-any.whl and b/Lib/ensurepip/_bundled/pip-22.3.1-py3-none-any.whl differ diff --git a/Lib/enum.py b/Lib/enum.py index a66c344dbc3db2..a0cad066dc23f7 100644 --- a/Lib/enum.py +++ b/Lib/enum.py @@ -114,9 +114,12 @@ def _break_on_call_reduce(self, proto): setattr(obj, '__module__', '<unknown>') def _iter_bits_lsb(num): - # num must be an integer + # num must be a positive integer + original = num if isinstance(num, Enum): num = num.value + if num < 0: + raise ValueError('%r is not a positive integer' % original) while num: b = num & (~num + 1) yield b @@ -171,7 +174,8 @@ class auto: """ Instances are replaced with an appropriate value in Enum class suites. """ - value = _auto_null + def __init__(self, value=_auto_null): + self.value = value def __repr__(self): return "auto(%r)" % self.value @@ -416,7 +420,7 @@ def __setitem__(self, key, value): value = value.value elif _is_descriptor(value): pass - # TODO: uncomment next three lines in 3.12 + # TODO: uncomment next three lines in 3.13 # elif _is_internal_class(self._cls_name, value): # # do nothing, name will be a normal attribute # pass @@ -427,15 +431,33 @@ def __setitem__(self, key, value): elif isinstance(value, member): # unwrap value here -- it will become a member value = value.value + non_auto_store = True + single = False if isinstance(value, auto): - if value.value == _auto_null: - value.value = self._generate_next_value( - key, 1, len(self._member_names), self._last_values[:], - ) - self._auto_called = True - value = value.value + single = True + value = (value, ) + if type(value) is tuple and any(isinstance(v, auto) for v in value): + # insist on an actual tuple, no subclasses, in keeping with only supporting + # top-level auto() usage (not contained in any other data structure) + auto_valued = [] + for v in value: + if isinstance(v, auto): + non_auto_store = False + if v.value == _auto_null: + v.value = self._generate_next_value( + key, 1, len(self._member_names), self._last_values[:], + ) + self._auto_called = True + v = v.value + self._last_values.append(v) + auto_valued.append(v) + if single: + value = auto_valued[0] + else: + value = tuple(auto_valued) self._member_names[key] = None - self._last_values.append(value) + if non_auto_store: + self._last_values.append(value) super().__setitem__(key, value) def update(self, members, **more_members): @@ -672,7 +694,7 @@ def __bool__(cls): """ return True - def __call__(cls, value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None): + def __call__(cls, value, names=None, *values, module=None, qualname=None, type=None, start=1, boundary=None): """ Either returns an 
existing member, or creates a new enum class. @@ -680,6 +702,8 @@ def __call__(cls, value, names=None, *, module=None, qualname=None, type=None, s to an enumeration member (i.e. Color(3)) and for the functional API (i.e. Color = Enum('Color', names='RED GREEN BLUE')). + The value lookup branch is chosen if the enum is final. + When used for the functional API: `value` will be the name of the new class. @@ -697,12 +721,15 @@ def __call__(cls, value, names=None, *, module=None, qualname=None, type=None, s `type`, if set, will be mixed in as the first base class. """ - if names is None: # simple value lookup + if cls._member_map_: + # simple value lookup if members exist + if names: + value = (value, names) + values return cls.__new__(cls, value) # otherwise, functional API: we're creating a new Enum type return cls._create_( - value, - names, + class_name=value, + names=names, module=module, qualname=qualname, type=type, @@ -930,7 +957,15 @@ def _find_data_repr_(mcls, class_name, bases): return base._value_repr_ elif '__repr__' in base.__dict__: # this is our data repr - return base.__dict__['__repr__'] + # double-check if a dataclass with a default __repr__ + if ( + '__dataclass_fields__' in base.__dict__ + and '__dataclass_params__' in base.__dict__ + and base.__dict__['__dataclass_params__'].repr + ): + return _dataclass_repr + else: + return base.__dict__['__repr__'] return None @classmethod @@ -1021,20 +1056,20 @@ class Enum(metaclass=EnumType): Access them by: - - attribute access:: + - attribute access: - >>> Color.RED - <Color.RED: 1> + >>> Color.RED + <Color.RED: 1> - value lookup: - >>> Color(1) - <Color.RED: 1> + >>> Color(1) + <Color.RED: 1> - name lookup: - >>> Color['RED'] - <Color.RED: 1> + >>> Color['RED'] + <Color.RED: 1> Enumerations can be iterated over, and know how many members they have: @@ -1526,6 +1561,14 @@ def _power_of_two(value): return False return value == 2 ** _high_bit(value) +def _dataclass_repr(self): + dcf = self.__dataclass_fields__ + return ', '.join( + '%s=%r' % (k, getattr(self, k)) + for k in dcf.keys() + if dcf[k].repr + ) + def global_enum_repr(self): """ use module.enum_name instead of class.enum_name @@ -1822,6 +1865,9 @@ def __call__(self, enumeration): if name in member_names: # not an alias continue + if alias.value < 0: + # negative numbers are not checked + continue values = list(_iter_bits_lsb(alias.value)) missed = [v for v in values if v not in member_values] if missed: diff --git a/Lib/ftplib.py b/Lib/ftplib.py index dc9a8afbd8d247..a56e0c3085701b 100644 --- a/Lib/ftplib.py +++ b/Lib/ftplib.py @@ -434,10 +434,7 @@ def retrbinary(self, cmd, callback, blocksize=8192, rest=None): """ self.voidcmd('TYPE I') with self.transfercmd(cmd, rest) as conn: - while 1: - data = conn.recv(blocksize) - if not data: - break + while data := conn.recv(blocksize): callback(data) # shutdown ssl layer if _SSLSocket is not None and isinstance(conn, _SSLSocket): @@ -496,10 +493,7 @@ def storbinary(self, cmd, fp, blocksize=8192, callback=None, rest=None): """ self.voidcmd('TYPE I') with self.transfercmd(cmd, rest) as conn: - while 1: - buf = fp.read(blocksize) - if not buf: - break + while buf := fp.read(blocksize): conn.sendall(buf) if callback: callback(buf) @@ -713,28 +707,12 @@ class FTP_TLS(FTP): '221 Goodbye.' 
>>> ''' - ssl_version = ssl.PROTOCOL_TLS_CLIENT def __init__(self, host='', user='', passwd='', acct='', - keyfile=None, certfile=None, context=None, - timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None, *, - encoding='utf-8'): - if context is not None and keyfile is not None: - raise ValueError("context and keyfile arguments are mutually " - "exclusive") - if context is not None and certfile is not None: - raise ValueError("context and certfile arguments are mutually " - "exclusive") - if keyfile is not None or certfile is not None: - import warnings - warnings.warn("keyfile and certfile are deprecated, use a " - "custom context instead", DeprecationWarning, 2) - self.keyfile = keyfile - self.certfile = certfile + *, context=None, timeout=_GLOBAL_DEFAULT_TIMEOUT, + source_address=None, encoding='utf-8'): if context is None: - context = ssl._create_stdlib_context(self.ssl_version, - certfile=certfile, - keyfile=keyfile) + context = ssl._create_stdlib_context() self.context = context self._prot_p = False super().__init__(host, user, passwd, acct, @@ -749,7 +727,7 @@ def auth(self): '''Set up secure control connection by using TLS/SSL.''' if isinstance(self.sock, ssl.SSLSocket): raise ValueError("Already using TLS") - if self.ssl_version >= ssl.PROTOCOL_TLS: + if self.context.protocol >= ssl.PROTOCOL_TLS: resp = self.voidcmd('AUTH TLS') else: resp = self.voidcmd('AUTH SSL') diff --git a/Lib/http/client.py b/Lib/http/client.py index 0720990f84e7ed..15c5cf634cf508 100644 --- a/Lib/http/client.py +++ b/Lib/http/client.py @@ -578,11 +578,7 @@ def _read_chunked(self, amt=None): assert self.chunked != _UNKNOWN value = [] try: - while True: - chunk_left = self._get_chunk_left() - if chunk_left is None: - break - + while (chunk_left := self._get_chunk_left()) is not None: if amt is not None and amt <= chunk_left: value.append(self._safe_read(amt)) self.chunk_left = chunk_left - amt @@ -998,10 +994,7 @@ def send(self, data): encode = self._is_textIO(data) if encode and self.debuglevel > 0: print("encoding file using iso-8859-1") - while 1: - datablock = data.read(self.blocksize) - if not datablock: - break + while datablock := data.read(self.blocksize): if encode: datablock = datablock.encode("iso-8859-1") sys.audit("http.client.send", self, datablock) @@ -1031,10 +1024,7 @@ def _read_readable(self, readable): encode = self._is_textIO(readable) if encode and self.debuglevel > 0: print("encoding file using iso-8859-1") - while True: - datablock = readable.read(self.blocksize) - if not datablock: - break + while datablock := readable.read(self.blocksize): if encode: datablock = datablock.encode("iso-8859-1") yield datablock @@ -1414,33 +1404,14 @@ class HTTPSConnection(HTTPConnection): default_port = HTTPS_PORT - # XXX Should key_file and cert_file be deprecated in favour of context? 
- - def __init__(self, host, port=None, key_file=None, cert_file=None, - timeout=socket._GLOBAL_DEFAULT_TIMEOUT, - source_address=None, *, context=None, - check_hostname=None, blocksize=8192): + def __init__(self, host, port=None, + *, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + source_address=None, context=None, blocksize=8192): super(HTTPSConnection, self).__init__(host, port, timeout, source_address, blocksize=blocksize) - if (key_file is not None or cert_file is not None or - check_hostname is not None): - import warnings - warnings.warn("key_file, cert_file and check_hostname are " - "deprecated, use a custom context instead.", - DeprecationWarning, 2) - self.key_file = key_file - self.cert_file = cert_file if context is None: context = _create_https_context(self._http_vsn) - if check_hostname is not None: - context.check_hostname = check_hostname - if key_file or cert_file: - context.load_cert_chain(cert_file, key_file) - # cert and key file means the user wants to authenticate. - # enable TLS 1.3 PHA implicitly even for custom contexts. - if context.post_handshake_auth is not None: - context.post_handshake_auth = True self._context = context def connect(self): diff --git a/Lib/http/cookiejar.py b/Lib/http/cookiejar.py index 65c45e2b17dfc0..b0161a86fdbb51 100644 --- a/Lib/http/cookiejar.py +++ b/Lib/http/cookiejar.py @@ -1915,9 +1915,7 @@ def _really_load(self, f, filename, ignore_discard, ignore_expires): "comment", "commenturl") try: - while 1: - line = f.readline() - if line == "": break + while (line := f.readline()) != "": if not line.startswith(header): continue line = line[len(header):].strip() @@ -2017,12 +2015,9 @@ def _really_load(self, f, filename, ignore_discard, ignore_expires): filename) try: - while 1: - line = f.readline() + while (line := f.readline()) != "": rest = {} - if line == "": break - # httponly is a cookie flag as defined in rfc6265 # when encoded in a netscape cookie file, # the line is prepended with "#HttpOnly_" diff --git a/Lib/http/server.py b/Lib/http/server.py index 8aee31bac2752a..8acabff605e795 100644 --- a/Lib/http/server.py +++ b/Lib/http/server.py @@ -93,6 +93,7 @@ import html import http.client import io +import itertools import mimetypes import os import posixpath @@ -562,6 +563,11 @@ def log_error(self, format, *args): self.log_message(format, *args) + # https://en.wikipedia.org/wiki/List_of_Unicode_characters#Control_codes + _control_char_table = str.maketrans( + {c: fr'\x{c:02x}' for c in itertools.chain(range(0x20), range(0x7f,0xa0))}) + _control_char_table[ord('\\')] = r'\\' + def log_message(self, format, *args): """Log an arbitrary message. @@ -577,12 +583,16 @@ def log_message(self, format, *args): The client ip and current date/time are prefixed to every message. + Unicode control characters are replaced with escaped hex + before writing the output to stderr. 
+ """ + message = format % args sys.stderr.write("%s - - [%s] %s\n" % (self.address_string(), self.log_date_time_string(), - format%args)) + message.translate(self._control_char_table))) def version_string(self): """Return the server software version string.""" diff --git a/Lib/idlelib/macosx.py b/Lib/idlelib/macosx.py index 89b645702d334d..2ea02ec04d661a 100644 --- a/Lib/idlelib/macosx.py +++ b/Lib/idlelib/macosx.py @@ -174,9 +174,8 @@ def overrideRootMenu(root, flist): del mainmenu.menudefs[-3][1][0:2] menubar = Menu(root) root.configure(menu=menubar) - menudict = {} - menudict['window'] = menu = Menu(menubar, name='window', tearoff=0) + menu = Menu(menubar, name='window', tearoff=0) menubar.add_cascade(label='Window', menu=menu, underline=0) def postwindowsmenu(menu=menu): @@ -226,8 +225,7 @@ def help_dialog(event=None): if isCarbonTk(): # for Carbon AquaTk, replace the default Tk apple menu - menudict['application'] = menu = Menu(menubar, name='apple', - tearoff=0) + menu = Menu(menubar, name='apple', tearoff=0) menubar.add_cascade(label='IDLE', menu=menu) mainmenu.menudefs.insert(0, ('application', [ diff --git a/Lib/imaplib.py b/Lib/imaplib.py index fa4c0f8f62361a..577b4b9b03a88d 100644 --- a/Lib/imaplib.py +++ b/Lib/imaplib.py @@ -1285,16 +1285,12 @@ class IMAP4_SSL(IMAP4): """IMAP4 client class over SSL connection - Instantiate with: IMAP4_SSL([host[, port[, keyfile[, certfile[, ssl_context[, timeout=None]]]]]]) + Instantiate with: IMAP4_SSL([host[, port[, ssl_context[, timeout=None]]]]) host - host's name (default: localhost); port - port number (default: standard IMAP4 SSL port); - keyfile - PEM formatted file that contains your private key (default: None); - certfile - PEM formatted certificate chain file (default: None); ssl_context - a SSLContext object that contains your certificate chain and private key (default: None) - Note: if ssl_context is provided, then parameters keyfile or - certfile should not be set otherwise ValueError is raised. 
timeout - socket timeout (default: None) If timeout is not given or is None, the global default socket timeout is used @@ -1302,23 +1298,10 @@ class IMAP4_SSL(IMAP4): """ - def __init__(self, host='', port=IMAP4_SSL_PORT, keyfile=None, - certfile=None, ssl_context=None, timeout=None): - if ssl_context is not None and keyfile is not None: - raise ValueError("ssl_context and keyfile arguments are mutually " - "exclusive") - if ssl_context is not None and certfile is not None: - raise ValueError("ssl_context and certfile arguments are mutually " - "exclusive") - if keyfile is not None or certfile is not None: - import warnings - warnings.warn("keyfile and certfile are deprecated, use a " - "custom ssl_context instead", DeprecationWarning, 2) - self.keyfile = keyfile - self.certfile = certfile + def __init__(self, host='', port=IMAP4_SSL_PORT, + *, ssl_context=None, timeout=None): if ssl_context is None: - ssl_context = ssl._create_stdlib_context(certfile=certfile, - keyfile=keyfile) + ssl_context = ssl._create_stdlib_context() self.ssl_context = ssl_context IMAP4.__init__(self, host, port, timeout) diff --git a/Lib/importlib/_bootstrap.py b/Lib/importlib/_bootstrap.py index e0110e701d6cde..1a1d5cf7c9c33e 100644 --- a/Lib/importlib/_bootstrap.py +++ b/Lib/importlib/_bootstrap.py @@ -580,7 +580,7 @@ def _module_repr_from_spec(spec): if spec.loader is None: return f'<module {name!r}>' else: - return f'<module {name!r} ({spec.loader!r})>' + return f'<module {name!r} (namespace) from {list(spec.loader._path)}>' else: if spec.has_location: return f'<module {name!r} from {spec.origin!r}>' diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py index 9c14f71b6c0739..7699d3a31cc5f7 100644 --- a/Lib/importlib/_bootstrap_external.py +++ b/Lib/importlib/_bootstrap_external.py @@ -424,6 +424,8 @@ def _write_atomic(path, data, mode=0o666): # Python 3.12a1 3508 (Add CLEANUP_THROW) # Python 3.12a1 3509 (Conditional jumps only jump forward) # Python 3.12a1 3510 (FOR_ITER leaves iterator on the stack) +# Python 3.12a1 3511 (Add STOPITERATION_ERROR instruction) +# Python 3.12a1 3512 (Remove all unused consts from code objects) # Python 3.13 will start with 3550 @@ -436,7 +438,7 @@ def _write_atomic(path, data, mode=0o666): # Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array # in PC/launcher.c must also be updated. 
-MAGIC_NUMBER = (3510).to_bytes(2, 'little') + b'\r\n' +MAGIC_NUMBER = (3562).to_bytes(2, 'little') + b'\r\n' _RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c @@ -1142,7 +1144,8 @@ def get_code(self, fullname): source_mtime is not None): if hash_based: if source_hash is None: - source_hash = _imp.source_hash(source_bytes) + source_hash = _imp.source_hash(_RAW_MAGIC_NUMBER, + source_bytes) data = _code_to_hash_pyc(code_object, source_hash, check_source) else: data = _code_to_timestamp_pyc(code_object, source_mtime, diff --git a/Lib/inspect.py b/Lib/inspect.py index f6750c3b211fbd..e165937e448a95 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -537,7 +537,7 @@ def _getmembers(object, predicate, getter): processed = set() names = dir(object) if isclass(object): - mro = (object,) + getmro(object) + mro = getmro(object) # add any DynamicClassAttributes to the list of names if object is a class; # this may result in duplicate entries if, for example, a virtual # attribute with the same name as a DynamicClassAttribute exists @@ -656,7 +656,7 @@ def classify_class_attrs(cls): if name == '__dict__': raise Exception("__dict__ is special, don't want the proxy") get_obj = getattr(cls, name) - except Exception as exc: + except Exception: pass else: homecls = getattr(get_obj, "__objclass__", homecls) @@ -1160,7 +1160,6 @@ def __init__(self): self.started = False self.passline = False self.indecorator = False - self.decoratorhasargs = False self.last = 1 self.body_col0 = None @@ -1175,13 +1174,6 @@ def tokeneater(self, type, token, srowcol, erowcol, line): self.islambda = True self.started = True self.passline = True # skip to the end of the line - elif token == "(": - if self.indecorator: - self.decoratorhasargs = True - elif token == ")": - if self.indecorator: - self.indecorator = False - self.decoratorhasargs = False elif type == tokenize.NEWLINE: self.passline = False # stop skipping when a NEWLINE is seen self.last = srowcol[0] @@ -1189,7 +1181,7 @@ def tokeneater(self, type, token, srowcol, erowcol, line): raise EndOfBlock # hitting a NEWLINE when in a decorator without args # ends the decorator - if self.indecorator and not self.decoratorhasargs: + if self.indecorator: self.indecorator = False elif self.passline: pass @@ -1310,7 +1302,6 @@ def getargs(co): nkwargs = co.co_kwonlyargcount args = list(names[:nargs]) kwonlyargs = list(names[nargs:nargs+nkwargs]) - step = 0 nargs += nkwargs varargs = None @@ -2185,7 +2176,6 @@ def _signature_fromstr(cls, obj, s, skip_bound_arg=True): parameters = [] empty = Parameter.empty - invalid = object() module = None module_dict = {} @@ -2235,17 +2225,12 @@ def visit_Name(self, node): def p(name_node, default_node, default=empty): name = parse_name(name_node) - if name is invalid: - return None if default_node and default_node is not _empty: try: default_node = RewriteSymbolics().visit(default_node) - o = ast.literal_eval(default_node) + default = ast.literal_eval(default_node) except ValueError: - o = invalid - if o is invalid: return None - default = o if o is not invalid else default parameters.append(Parameter(name, kind, default=default, annotation=empty)) # non-keyword-only parameters @@ -2442,7 +2427,10 @@ def _signature_from_callable(obj, *, # Was this function wrapped by a decorator? if follow_wrapper_chains: - obj = unwrap(obj, stop=(lambda f: hasattr(f, "__signature__"))) + # Unwrap until we find an explicit signature or a MethodType (which will be + # handled explicitly below). 
+ obj = unwrap(obj, stop=(lambda f: hasattr(f, "__signature__") + or isinstance(f, types.MethodType))) if isinstance(obj, types.MethodType): # If the unwrapped object is a *method*, we might want to # skip its first parameter (self). diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py index 3f15601e700d68..1cb71d8032e173 100644 --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -1077,15 +1077,16 @@ def is_link_local(self): @property def is_private(self): - """Test if this address is allocated for private networks. + """Test if this network belongs to a private range. Returns: - A boolean, True if the address is reserved per + A boolean, True if the network is reserved per iana-ipv4-special-registry or iana-ipv6-special-registry. """ - return (self.network_address.is_private and - self.broadcast_address.is_private) + return any(self.network_address in priv_network and + self.broadcast_address in priv_network + for priv_network in self._constants._private_networks) @property def is_global(self): @@ -1122,6 +1123,15 @@ def is_loopback(self): return (self.network_address.is_loopback and self.broadcast_address.is_loopback) + +class _BaseConstants: + + _private_networks = [] + + +_BaseNetwork._constants = _BaseConstants + + class _BaseV4: """Base IPv4 object. @@ -1561,6 +1571,7 @@ class _IPv4Constants: IPv4Address._constants = _IPv4Constants +IPv4Network._constants = _IPv4Constants class _BaseV6: @@ -2285,3 +2296,4 @@ class _IPv6Constants: IPv6Address._constants = _IPv6Constants +IPv6Network._constants = _IPv6Constants diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py index 86e1efe6e653ac..9241d73d0fd03c 100644 --- a/Lib/logging/__init__.py +++ b/Lib/logging/__init__.py @@ -511,7 +511,7 @@ def __init__(self, *args, **kwargs): def usesTime(self): fmt = self._fmt - return fmt.find('$asctime') >= 0 or fmt.find(self.asctime_format) >= 0 + return fmt.find('$asctime') >= 0 or fmt.find(self.asctime_search) >= 0 def validate(self): pattern = Template.pattern diff --git a/Lib/mailbox.py b/Lib/mailbox.py index 70da07ed2e9e8b..59834a2b3b5243 100644 --- a/Lib/mailbox.py +++ b/Lib/mailbox.py @@ -1956,10 +1956,7 @@ def readlines(self, sizehint=None): def __iter__(self): """Iterate over lines.""" - while True: - line = self.readline() - if not line: - return + while line := self.readline(): yield line def tell(self): diff --git a/Lib/mailcap.py b/Lib/mailcap.py index 7278ea7051fccf..2f4656e854b3bb 100644 --- a/Lib/mailcap.py +++ b/Lib/mailcap.py @@ -90,9 +90,7 @@ def _readmailcapfile(fp, lineno): the viewing command is stored with the key "view". """ caps = {} - while 1: - line = fp.readline() - if not line: break + while line := fp.readline(): # Ignore comments and blank lines if line[0] == '#' or line.strip() == '': continue diff --git a/Lib/mimetypes.py b/Lib/mimetypes.py index 3224363a3f2bfb..37228de4828de5 100644 --- a/Lib/mimetypes.py +++ b/Lib/mimetypes.py @@ -217,10 +217,7 @@ def readfp(self, fp, strict=True): list of standard types, else to the list of non-standard types. 
""" - while 1: - line = fp.readline() - if not line: - break + while line := fp.readline(): words = line.split() for i in range(len(words)): if words[i][0] == '#': diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py index b08144f7a1a169..1a8822b9db012d 100644 --- a/Lib/multiprocessing/connection.py +++ b/Lib/multiprocessing/connection.py @@ -728,6 +728,74 @@ def PipeClient(address): WELCOME = b'#WELCOME#' FAILURE = b'#FAILURE#' +# multiprocessing.connection Authentication Handshake Protocol Description +# (as documented for reference after reading the existing code) +# ============================================================================= +# +# On Windows: native pipes with "overlapped IO" are used to send the bytes, +# instead of the length prefix SIZE scheme described below. (ie: the OS deals +# with message sizes for us) +# +# Protocol error behaviors: +# +# On POSIX, any failure to receive the length prefix into SIZE, for SIZE greater +# than the requested maxsize to receive, or receiving fewer than SIZE bytes +# results in the connection being closed and auth to fail. +# +# On Windows, receiving too few bytes is never a low level _recv_bytes read +# error, receiving too many will trigger an error only if receive maxsize +# value was larger than 128 OR the if the data arrived in smaller pieces. +# +# Serving side Client side +# ------------------------------ --------------------------------------- +# 0. Open a connection on the pipe. +# 1. Accept connection. +# 2. New random 20 bytes -> MESSAGE +# 3. send 4 byte length (net order) +# prefix followed by: +# b'#CHALLENGE#' + MESSAGE +# 4. Receive 4 bytes, parse as network byte +# order integer. If it is -1, receive an +# additional 8 bytes, parse that as network +# byte order. The result is the length of +# the data that follows -> SIZE. +# 5. Receive min(SIZE, 256) bytes -> M1 +# 6. Assert that M1 starts with: +# b'#CHALLENGE#' +# 7. Strip that prefix from M1 into -> M2 +# 8. Compute HMAC-MD5 of AUTHKEY, M2 -> C_DIGEST +# 9. Send 4 byte length prefix (net order) +# followed by C_DIGEST bytes. +# 10. Compute HMAC-MD5 of AUTHKEY, +# MESSAGE into -> M_DIGEST. +# 11. Receive 4 or 4+8 byte length +# prefix (#4 dance) -> SIZE. +# 12. Receive min(SIZE, 256) -> C_D. +# 13. Compare M_DIGEST == C_D: +# 14a: Match? Send length prefix & +# b'#WELCOME#' +# <- RETURN +# 14b: Mismatch? Send len prefix & +# b'#FAILURE#' +# <- CLOSE & AuthenticationError +# 15. Receive 4 or 4+8 byte length prefix (net +# order) again as in #4 into -> SIZE. +# 16. Receive min(SIZE, 256) bytes -> M3. +# 17. Compare M3 == b'#WELCOME#': +# 17a. Match? <- RETURN +# 17b. Mismatch? <- CLOSE & AuthenticationError +# +# If this RETURNed, the connection remains open: it has been authenticated. +# +# Length prefixes are used consistently even though every step so far has +# always been a singular specific fixed length. This may help us evolve +# the protocol in the future without breaking backwards compatibility. +# +# Similarly the initial challenge message from the serving side has always +# been 20 bytes, but clients can accept a 100+ so using the length of the +# opening challenge message as an indicator of protocol version may work. 
+ + def deliver_challenge(connection, authkey): import hmac if not isinstance(authkey, bytes): diff --git a/Lib/multiprocessing/pool.py b/Lib/multiprocessing/pool.py index 961d7e5991847a..4f5d88cb975cb7 100644 --- a/Lib/multiprocessing/pool.py +++ b/Lib/multiprocessing/pool.py @@ -696,7 +696,7 @@ def _terminate_pool(cls, taskqueue, inqueue, outqueue, pool, change_notifier, if (not result_handler.is_alive()) and (len(cache) != 0): raise AssertionError( - "Cannot have cache with result_hander not alive") + "Cannot have cache with result_handler not alive") result_handler._state = TERMINATE change_notifier.put(None) diff --git a/Lib/multiprocessing/shared_memory.py b/Lib/multiprocessing/shared_memory.py index 881f2001dd5980..9a1e5aa17b87a2 100644 --- a/Lib/multiprocessing/shared_memory.py +++ b/Lib/multiprocessing/shared_memory.py @@ -173,7 +173,10 @@ def __init__(self, name=None, create=False, size=0): ) finally: _winapi.CloseHandle(h_map) - size = _winapi.VirtualQuerySize(p_buf) + try: + size = _winapi.VirtualQuerySize(p_buf) + finally: + _winapi.UnmapViewOfFile(p_buf) self._mmap = mmap.mmap(-1, size, tagname=name) self._size = size diff --git a/Lib/ntpath.py b/Lib/ntpath.py index d9582f4087433e..265eaa8d4b953f 100644 --- a/Lib/ntpath.py +++ b/Lib/ntpath.py @@ -30,7 +30,7 @@ "ismount", "expanduser","expandvars","normpath","abspath", "curdir","pardir","sep","pathsep","defpath","altsep", "extsep","devnull","realpath","supports_unicode_filenames","relpath", - "samefile", "sameopenfile", "samestat", "commonpath"] + "samefile", "sameopenfile", "samestat", "commonpath", "isjunction"] def _get_bothseps(path): if isinstance(path, bytes): @@ -267,6 +267,24 @@ def islink(path): return False return stat.S_ISLNK(st.st_mode) + +# Is a path a junction? + +if hasattr(os.stat_result, 'st_reparse_tag'): + def isjunction(path): + """Test whether a path is a junction""" + try: + st = os.lstat(path) + except (OSError, ValueError, AttributeError): + return False + return bool(st.st_reparse_tag == stat.IO_REPARSE_TAG_MOUNT_POINT) +else: + def isjunction(path): + """Test whether a path is a junction""" + os.fspath(path) + return False + + # Being true for dangling symbolic links is also useful. def lexists(path): @@ -645,12 +663,15 @@ def _getfinalpathname_nonstrict(path): # 21: ERROR_NOT_READY (implies drive with no media) # 32: ERROR_SHARING_VIOLATION (probably an NTFS paging file) # 50: ERROR_NOT_SUPPORTED + # 53: ERROR_BAD_NETPATH + # 65: ERROR_NETWORK_ACCESS_DENIED # 67: ERROR_BAD_NET_NAME (implies remote server unavailable) # 87: ERROR_INVALID_PARAMETER # 123: ERROR_INVALID_NAME + # 161: ERROR_BAD_PATHNAME # 1920: ERROR_CANT_ACCESS_FILE # 1921: ERROR_CANT_RESOLVE_FILENAME (implies unfollowable symlink) - allowed_winerror = 1, 2, 3, 5, 21, 32, 50, 67, 87, 123, 1920, 1921 + allowed_winerror = 1, 2, 3, 5, 21, 32, 50, 53, 65, 67, 87, 123, 161, 1920, 1921 # Non-strict algorithm is to find as much of the target directory # as we can and join the rest. 
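The isjunction() helper added to ntpath above (with a posixpath counterpart and pathlib.Path.is_junction() later in this diff) lets callers distinguish Windows directory junctions from ordinary directories and symlinks. A small usage sketch, assuming an interpreter built with these changes; on POSIX the check is always False, and the example path is only a typical Windows junction:

    import os.path
    from pathlib import Path

    def classify(path):
        # Junctions are reparse points tagged IO_REPARSE_TAG_MOUNT_POINT;
        # treating them like symlinks avoids descending into them, which is
        # the same distinction the shutil.rmtree change later in this diff uses.
        if os.path.isjunction(path):
            return 'junction'
        if os.path.islink(path):
            return 'symlink'
        if os.path.isdir(path):
            return 'directory'
        return 'other'

    print(classify('C:/Documents and Settings'))          # typically 'junction' on Windows
    print(Path('C:/Documents and Settings').is_junction())
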
diff --git a/Lib/opcode.py b/Lib/opcode.py index d655b0e44a4c19..25e23bc89bbfda 100644 --- a/Lib/opcode.py +++ b/Lib/opcode.py @@ -77,6 +77,7 @@ def pseudo_op(name, op, real_ops): def_op('CACHE', 0) def_op('POP_TOP', 1) def_op('PUSH_NULL', 2) +def_op('INTERPRETER_EXIT', 3) def_op('END_FOR', 4) @@ -86,6 +87,10 @@ def pseudo_op(name, op, real_ops): def_op('UNARY_NOT', 12) def_op('UNARY_INVERT', 15) +def_op('UNARY_POSITIVE_R', 16) +def_op('UNARY_NEGATIVE_R', 17) +def_op('UNARY_NOT_R', 18) +def_op('UNARY_INVERT_R', 19) def_op('BINARY_SUBSCR', 25) def_op('BINARY_SLICE', 26) @@ -111,6 +116,8 @@ def pseudo_op(name, op, real_ops): def_op('STORE_SUBSCR', 60) def_op('DELETE_SUBSCR', 61) +def_op('STOPITERATION_ERROR', 63) + def_op('GET_ITER', 68) def_op('GET_YIELD_FROM_ITER', 69) def_op('PRINT_EXPR', 70) @@ -205,6 +212,12 @@ def pseudo_op(name, op, real_ops): def_op('RESUME', 151) # This must be kept in sync with deepfreeze.py def_op('MATCH_CLASS', 152) +def_op('LOAD_FAST_R', 153) # Local variable number, no null check +haslocal.append(124) +def_op('STORE_FAST_R', 154) # Local variable number +haslocal.append(125) + + def_op('FORMAT_VALUE', 155) def_op('BUILD_CONST_KEY_MAP', 156) def_op('BUILD_STRING', 157) @@ -276,7 +289,6 @@ def pseudo_op(name, op, real_ops): _specializations = { "BINARY_OP": [ - "BINARY_OP_ADAPTIVE", "BINARY_OP_ADD_FLOAT", "BINARY_OP_ADD_INT", "BINARY_OP_ADD_UNICODE", @@ -287,14 +299,12 @@ def pseudo_op(name, op, real_ops): "BINARY_OP_SUBTRACT_INT", ], "BINARY_SUBSCR": [ - "BINARY_SUBSCR_ADAPTIVE", "BINARY_SUBSCR_DICT", "BINARY_SUBSCR_GETITEM", "BINARY_SUBSCR_LIST_INT", "BINARY_SUBSCR_TUPLE_INT", ], "CALL": [ - "CALL_ADAPTIVE", "CALL_PY_EXACT_ARGS", "CALL_PY_WITH_DEFAULTS", "CALL_BOUND_METHOD_EXACT_ARGS", @@ -314,24 +324,17 @@ def pseudo_op(name, op, real_ops): "CALL_NO_KW_TYPE_1", ], "COMPARE_OP": [ - "COMPARE_OP_ADAPTIVE", "COMPARE_OP_FLOAT_JUMP", "COMPARE_OP_INT_JUMP", "COMPARE_OP_STR_JUMP", ], - "EXTENDED_ARG": [ - "EXTENDED_ARG_QUICK", - ], "FOR_ITER": [ - "FOR_ITER_ADAPTIVE", "FOR_ITER_LIST", + "FOR_ITER_TUPLE", "FOR_ITER_RANGE", - ], - "JUMP_BACKWARD": [ - "JUMP_BACKWARD_QUICK", + "FOR_ITER_GEN", ], "LOAD_ATTR": [ - "LOAD_ATTR_ADAPTIVE", # These potentially push [NULL, bound method] onto the stack. 
"LOAD_ATTR_CLASS", "LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN", @@ -354,15 +357,10 @@ def pseudo_op(name, op, real_ops): "LOAD_FAST__LOAD_FAST", ], "LOAD_GLOBAL": [ - "LOAD_GLOBAL_ADAPTIVE", "LOAD_GLOBAL_BUILTIN", "LOAD_GLOBAL_MODULE", ], - "RESUME": [ - "RESUME_QUICK", - ], "STORE_ATTR": [ - "STORE_ATTR_ADAPTIVE", "STORE_ATTR_INSTANCE_VALUE", "STORE_ATTR_SLOT", "STORE_ATTR_WITH_HINT", @@ -372,12 +370,10 @@ def pseudo_op(name, op, real_ops): "STORE_FAST__STORE_FAST", ], "STORE_SUBSCR": [ - "STORE_SUBSCR_ADAPTIVE", "STORE_SUBSCR_DICT", "STORE_SUBSCR_LIST_INT", ], "UNPACK_SEQUENCE": [ - "UNPACK_SEQUENCE_ADAPTIVE", "UNPACK_SEQUENCE_LIST", "UNPACK_SEQUENCE_TUPLE", "UNPACK_SEQUENCE_TWO_TUPLE", @@ -395,6 +391,9 @@ def pseudo_op(name, op, real_ops): "deopt", ] +# number of codewords for opcode+oparg(s) +_opsize = 2 + _cache_format = { "LOAD_GLOBAL": { "counter": 1, diff --git a/Lib/pathlib.py b/Lib/pathlib.py index 1498ce08be4012..f31eb3010368d5 100644 --- a/Lib/pathlib.py +++ b/Lib/pathlib.py @@ -200,6 +200,7 @@ def make_uri(self, path): # Globbing helpers # +@functools.lru_cache() def _make_selector(pattern_parts, flavour): pat = pattern_parts[0] child_parts = pattern_parts[1:] @@ -215,9 +216,6 @@ def _make_selector(pattern_parts, flavour): cls = _PreciseSelector return cls(pat, child_parts, flavour) -if hasattr(functools, "lru_cache"): - _make_selector = functools.lru_cache()(_make_selector) - class _Selector: """A selector matches a specific glob pattern part against the children @@ -634,57 +632,27 @@ def relative_to(self, *other, walk_up=False): The *walk_up* parameter controls whether `..` may be used to resolve the path. """ - # For the purpose of this method, drive and root are considered - # separate parts, i.e.: - # Path('c:/').relative_to('c:') gives Path('/') - # Path('c:/').relative_to('/') raise ValueError if not other: raise TypeError("need at least one argument") - parts = self._parts - drv = self._drv - root = self._root - if root: - abs_parts = [drv, root] + parts[1:] - else: - abs_parts = parts - other_drv, other_root, other_parts = self._parse_args(other) - if other_root: - other_abs_parts = [other_drv, other_root] + other_parts[1:] - else: - other_abs_parts = other_parts - num_parts = len(other_abs_parts) - casefold = self._flavour.casefold_parts - num_common_parts = 0 - for part, other_part in zip(casefold(abs_parts), casefold(other_abs_parts)): - if part != other_part: + path_cls = type(self) + other = path_cls(*other) + for step, path in enumerate([other] + list(other.parents)): + if self.is_relative_to(path): break - num_common_parts += 1 - if walk_up: - failure = root != other_root - if drv or other_drv: - failure = casefold([drv]) != casefold([other_drv]) or (failure and num_parts > 1) - error_message = "{!r} is not on the same drive as {!r}" - up_parts = (num_parts-num_common_parts)*['..'] else: - failure = (root or drv) if num_parts == 0 else num_common_parts != num_parts - error_message = "{!r} is not in the subpath of {!r}" - up_parts = [] - error_message += " OR one path is relative and the other is absolute." 
- if failure: - formatted = self._format_parsed_parts(other_drv, other_root, other_parts) - raise ValueError(error_message.format(str(self), str(formatted))) - path_parts = up_parts + abs_parts[num_common_parts:] - new_root = root if num_common_parts == 1 else '' - return self._from_parsed_parts('', new_root, path_parts) + raise ValueError(f"{str(self)!r} and {str(other)!r} have different anchors") + if step and not walk_up: + raise ValueError(f"{str(self)!r} is not in the subpath of {str(other)!r}") + parts = ('..',) * step + self.parts[len(path.parts):] + return path_cls(*parts) def is_relative_to(self, *other): """Return True if the path is relative to another path or False. """ - try: - self.relative_to(*other) - return True - except ValueError: - return False + if not other: + raise TypeError("need at least one argument") + other = type(self)(*other) + return other == self or other in self.parents @property def parts(self): @@ -869,8 +837,10 @@ def samefile(self, other_path): return os.path.samestat(st, other_st) def iterdir(self): - """Iterate over the files in this directory. Does not yield any - result for the special paths '.' and '..'. + """Yield path objects of the directory contents. + + The children are yielded in arbitrary order, and the + special entries '.' and '..' are not included. """ for name in os.listdir(self): yield self._make_child_relpath(name) @@ -1223,6 +1193,12 @@ def is_symlink(self): # Non-encodable path return False + def is_junction(self): + """ + Whether this path is a junction. + """ + return self._flavour.pathmod.isjunction(self) + def is_block_device(self): """ Whether this path is a block device. diff --git a/Lib/platform.py b/Lib/platform.py index 9f5b317287530b..6745321e31c279 100755 --- a/Lib/platform.py +++ b/Lib/platform.py @@ -847,6 +847,8 @@ class uname_result( except when needed. """ + _fields = ('system', 'node', 'release', 'version', 'machine', 'processor') + @functools.cached_property def processor(self): return _unknown_as_blank(_Processor.get()) @@ -860,7 +862,7 @@ def __iter__(self): @classmethod def _make(cls, iterable): # override factory to affect length check - num_fields = len(cls._fields) + num_fields = len(cls._fields) - 1 result = cls.__new__(cls, *iterable) if len(result) != num_fields + 1: msg = f'Expected {num_fields} arguments, got {len(result)}' @@ -874,7 +876,7 @@ def __len__(self): return len(tuple(iter(self))) def __reduce__(self): - return uname_result, tuple(self)[:len(self._fields)] + return uname_result, tuple(self)[:len(self._fields) - 1] _uname_cache = None diff --git a/Lib/poplib.py b/Lib/poplib.py index 0f8587317c2bbc..9a5ef03c983103 100644 --- a/Lib/poplib.py +++ b/Lib/poplib.py @@ -419,35 +419,19 @@ def stls(self, context=None): class POP3_SSL(POP3): """POP3 client class over SSL connection - Instantiate with: POP3_SSL(hostname, port=995, keyfile=None, certfile=None, - context=None) + Instantiate with: POP3_SSL(hostname, port=995, context=None) hostname - the hostname of the pop3 over ssl server port - port number - keyfile - PEM formatted file that contains your private key - certfile - PEM formatted certificate chain file context - a ssl.SSLContext See the methods of the parent class POP3 for more documentation. 
""" - def __init__(self, host, port=POP3_SSL_PORT, keyfile=None, certfile=None, - timeout=socket._GLOBAL_DEFAULT_TIMEOUT, context=None): - if context is not None and keyfile is not None: - raise ValueError("context and keyfile arguments are mutually " - "exclusive") - if context is not None and certfile is not None: - raise ValueError("context and certfile arguments are mutually " - "exclusive") - if keyfile is not None or certfile is not None: - import warnings - warnings.warn("keyfile and certfile are deprecated, use a " - "custom context instead", DeprecationWarning, 2) - self.keyfile = keyfile - self.certfile = certfile + def __init__(self, host, port=POP3_SSL_PORT, + *, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, context=None): if context is None: - context = ssl._create_stdlib_context(certfile=certfile, - keyfile=keyfile) + context = ssl._create_stdlib_context() self.context = context POP3.__init__(self, host, port, timeout) @@ -457,7 +441,7 @@ def _create_socket(self, timeout): server_hostname=self.host) return sock - def stls(self, keyfile=None, certfile=None, context=None): + def stls(self, context=None): """The method unconditionally raises an exception since the STLS command doesn't make any sense on an already established SSL/TLS session. diff --git a/Lib/posixpath.py b/Lib/posixpath.py index 5e1ebe3293d849..737f8a5c156d81 100644 --- a/Lib/posixpath.py +++ b/Lib/posixpath.py @@ -35,7 +35,7 @@ "samefile","sameopenfile","samestat", "curdir","pardir","sep","pathsep","defpath","altsep","extsep", "devnull","realpath","supports_unicode_filenames","relpath", - "commonpath"] + "commonpath", "isjunction"] def _get_sep(path): @@ -169,6 +169,16 @@ def islink(path): return False return stat.S_ISLNK(st.st_mode) + +# Is a path a junction? + +def isjunction(path): + """Test whether a path is a junction + Junctions are not a part of posix semantics""" + os.fspath(path) + return False + + # Being true for dangling symbolic links is also useful. 
def lexists(path): @@ -195,6 +205,7 @@ def ismount(path): if stat.S_ISLNK(s1.st_mode): return False + path = os.fspath(path) if isinstance(path, bytes): parent = join(path, b'..') else: diff --git a/Lib/pstats.py b/Lib/pstats.py index 80408313e8b27f..51bcca84188740 100644 --- a/Lib/pstats.py +++ b/Lib/pstats.py @@ -223,8 +223,6 @@ def get_sort_arg_defs(self): for word, tup in self.sort_arg_dict_default.items(): fragment = word while fragment: - if not fragment: - break if fragment in dict: bad_list[fragment] = 0 break diff --git a/Lib/pydoc.py b/Lib/pydoc.py index c79ec77a8c09e4..0a693f45230c93 100755 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py @@ -686,9 +686,7 @@ def markup(self, text, escape=None, funcs={}, classes={}, methods={}): r'RFC[- ]?(\d+)|' r'PEP[- ]?(\d+)|' r'(self\.)?(\w+))') - while True: - match = pattern.search(text, here) - if not match: break + while match := pattern.search(text, here): start, end = match.span() results.append(escape(text[here:start])) diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py index 52a3ee1e1cb5bc..4ba680cceda93a 100644 --- a/Lib/pydoc_data/topics.py +++ b/Lib/pydoc_data/topics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Autogenerated by Sphinx on Tue Oct 25 00:07:40 2022 +# Autogenerated by Sphinx on Tue Dec 6 19:31:49 2022 topics = {'assert': 'The "assert" statement\n' '**********************\n' '\n' @@ -358,7 +358,7 @@ 'yield_expression)]\n' '\n' 'The difference from normal Assignment statements is that only ' - 'single\n' + 'a single\n' 'target is allowed.\n' '\n' 'For simple names as assignment targets, if in class or module ' @@ -408,12 +408,13 @@ 'analysis\n' ' tools and IDEs.\n' '\n' - 'Changed in version 3.8: Now annotated assignments allow same\n' - 'expressions in the right hand side as the regular ' - 'assignments.\n' - 'Previously, some expressions (like un-parenthesized tuple ' - 'expressions)\n' - 'caused a syntax error.\n', + 'Changed in version 3.8: Now annotated assignments allow the ' + 'same\n' + 'expressions in the right hand side as regular assignments. ' + 'Previously,\n' + 'some expressions (like un-parenthesized tuple expressions) ' + 'caused a\n' + 'syntax error.\n', 'async': 'Coroutines\n' '**********\n' '\n' @@ -7320,7 +7321,7 @@ 'the clauses had been separated out into individual import ' 'statements.\n' '\n' - 'The details of the first step, finding and loading modules are\n' + 'The details of the first step, finding and loading modules, are\n' 'described in greater detail in the section on the import system, ' 'which\n' 'also describes the various types of packages and modules that can ' @@ -11108,8 +11109,9 @@ 'y)" is\n' 'typically invalid without special support in "MyClass". To ' 'be able to\n' - 'use that kind of patterns, the class needs to define a\n' - '*__match_args__* attribute.\n' + 'use that kind of pattern, the class needs to define a ' + '*__match_args__*\n' + 'attribute.\n' '\n' 'object.__match_args__\n' '\n' @@ -11314,6 +11316,10 @@ '*start* and\n' ' *end* are interpreted as in slice notation.\n' '\n' + ' If *sub* is empty, returns the number of empty strings ' + 'between\n' + ' characters which is the length of the string plus one.\n' + '\n' "str.encode(encoding='utf-8', errors='strict')\n" '\n' ' Return an encoded version of the string as a bytes ' @@ -11505,7 +11511,7 @@ 'property\n' ' being one of “Lm”, “Lt”, “Lu”, “Ll”, or “Lo”. 
Note ' 'that this is\n' - ' different from the “Alphabetic” property defined in the ' + ' different from the Alphabetic property defined in the ' 'Unicode\n' ' Standard.\n' '\n' @@ -11554,9 +11560,9 @@ 'according to the\n' ' language definition, section Identifiers and keywords.\n' '\n' - ' Call "keyword.iskeyword()" to test whether string "s" ' - 'is a reserved\n' - ' identifier, such as "def" and "class".\n' + ' "keyword.iskeyword()" can be used to test whether ' + 'string "s" is a\n' + ' reserved identifier, such as "def" and "class".\n' '\n' ' Example:\n' '\n' @@ -12360,10 +12366,12 @@ '2. As in Standard C, up to three octal digits are accepted.\n' '\n' ' Changed in version 3.11: Octal escapes with value larger than\n' - ' "0o377" produce a "DeprecationWarning". In a future Python ' - 'version\n' - ' they will be a "SyntaxWarning" and eventually a ' - '"SyntaxError".\n' + ' "0o377" produce a "DeprecationWarning".\n' + '\n' + ' Changed in version 3.12: Octal escapes with value larger than\n' + ' "0o377" produce a "SyntaxWarning". In a future Python version ' + 'they\n' + ' will be eventually a "SyntaxError".\n' '\n' '3. Unlike in Standard C, exactly two hex digits are required.\n' '\n' @@ -12398,9 +12406,13 @@ '\n' ' Changed in version 3.6: Unrecognized escape sequences produce ' 'a\n' - ' "DeprecationWarning". In a future Python version they will be ' + ' "DeprecationWarning".\n' + '\n' + ' Changed in version 3.12: Unrecognized escape sequences produce ' 'a\n' - ' "SyntaxWarning" and eventually a "SyntaxError".\n' + ' "SyntaxWarning". In a future Python version they will be ' + 'eventually\n' + ' a "SyntaxError".\n' '\n' 'Even in a raw literal, quotes can be escaped with a backslash, ' 'but the\n' @@ -13974,17 +13986,11 @@ 'dictionaries or\n' 'other mutable types (that are compared by value rather than ' 'by object\n' - 'identity) may not be used as keys. Numeric types used for ' - 'keys obey\n' - 'the normal rules for numeric comparison: if two numbers ' - 'compare equal\n' - '(such as "1" and "1.0") then they can be used ' - 'interchangeably to index\n' - 'the same dictionary entry. (Note however, that since ' - 'computers store\n' - 'floating-point numbers as approximations it is usually ' - 'unwise to use\n' - 'them as dictionary keys.)\n' + 'identity) may not be used as keys. 
Values that compare equal ' + '(such as\n' + '"1", "1.0", and "True") can be used interchangeably to index ' + 'the same\n' + 'dictionary entry.\n' '\n' 'class dict(**kwargs)\n' 'class dict(mapping, **kwargs)\n' diff --git a/Lib/quopri.py b/Lib/quopri.py index 08899c5cb73a30..f36cf7b3951cda 100755 --- a/Lib/quopri.py +++ b/Lib/quopri.py @@ -67,10 +67,7 @@ def write(s, output=output, lineEnd=b'\n'): output.write(s + lineEnd) prevline = None - while 1: - line = input.readline() - if not line: - break + while line := input.readline(): outline = [] # Strip off any readline induced trailing newline stripped = b'' @@ -126,9 +123,7 @@ def decode(input, output, header=False): return new = b'' - while 1: - line = input.readline() - if not line: break + while line := input.readline(): i, n = 0, len(line) if n > 0 and line[n-1:n] == b'\n': partial = 0; n = n-1 diff --git a/Lib/secrets.py b/Lib/secrets.py index 900381a89f53ec..566a09b7311154 100644 --- a/Lib/secrets.py +++ b/Lib/secrets.py @@ -13,7 +13,6 @@ import base64 -import binascii from hmac import compare_digest from random import SystemRandom @@ -56,7 +55,7 @@ def token_hex(nbytes=None): 'f9bf78b9a18ce6d46a0cd2b0b86df9da' """ - return binascii.hexlify(token_bytes(nbytes)).decode('ascii') + return token_bytes(nbytes).hex() def token_urlsafe(nbytes=None): """Return a random URL-safe text string, in Base64 encoding. diff --git a/Lib/shlex.py b/Lib/shlex.py index a91c9b022627b1..f4821616b62a0f 100644 --- a/Lib/shlex.py +++ b/Lib/shlex.py @@ -333,10 +333,7 @@ def quote(s): def _print_tokens(lexer): - while 1: - tt = lexer.get_token() - if not tt: - break + while tt := lexer.get_token(): print("Token: " + repr(tt)) if __name__ == '__main__': diff --git a/Lib/shutil.py b/Lib/shutil.py index ac1dd530528c0a..867925aa10cc04 100644 --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -194,10 +194,7 @@ def copyfileobj(fsrc, fdst, length=0): # Localize variable access to minimize overhead. fsrc_read = fsrc.read fdst_write = fdst.write - while True: - buf = fsrc_read(length) - if not buf: - break + while buf := fsrc_read(length): fdst_write(buf) def _samefile(src, dst): @@ -490,12 +487,13 @@ def _copytree(entries, src, dst, symlinks, ignore, copy_function, # otherwise let the copy occur. copy2 will raise an error if srcentry.is_dir(): copytree(srcobj, dstname, symlinks, ignore, - copy_function, dirs_exist_ok=dirs_exist_ok) + copy_function, ignore_dangling_symlinks, + dirs_exist_ok) else: copy_function(srcobj, dstname) elif srcentry.is_dir(): copytree(srcobj, dstname, symlinks, ignore, copy_function, - dirs_exist_ok=dirs_exist_ok) + ignore_dangling_symlinks, dirs_exist_ok) else: # Will raise a SpecialFileError for unsupported file types copy_function(srcobj, dstname) @@ -564,18 +562,6 @@ def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2, dirs_exist_ok=dirs_exist_ok) if hasattr(os.stat_result, 'st_file_attributes'): - # Special handling for directory junctions to make them behave like - # symlinks for shutil.rmtree, since in general they do not appear as - # regular links. 
- def _rmtree_isdir(entry): - try: - st = entry.stat(follow_symlinks=False) - return (stat.S_ISDIR(st.st_mode) and not - (st.st_file_attributes & stat.FILE_ATTRIBUTE_REPARSE_POINT - and st.st_reparse_tag == stat.IO_REPARSE_TAG_MOUNT_POINT)) - except OSError: - return False - def _rmtree_islink(path): try: st = os.lstat(path) @@ -585,12 +571,6 @@ def _rmtree_islink(path): except OSError: return False else: - def _rmtree_isdir(entry): - try: - return entry.is_dir(follow_symlinks=False) - except OSError: - return False - def _rmtree_islink(path): return os.path.islink(path) @@ -604,7 +584,12 @@ def _rmtree_unsafe(path, onerror): entries = [] for entry in entries: fullname = entry.path - if _rmtree_isdir(entry): + try: + is_dir = entry.is_dir(follow_symlinks=False) + except OSError: + is_dir = False + + if is_dir and not entry.is_junction(): try: if entry.is_symlink(): # This can only happen if someone replaces diff --git a/Lib/smtplib.py b/Lib/smtplib.py index 324a1c19f12afe..18c91746fd7bf2 100755 --- a/Lib/smtplib.py +++ b/Lib/smtplib.py @@ -749,14 +749,14 @@ def login(self, user, password, *, initial_response_ok=True): # We could not login successfully. Return result of last attempt. raise last_exception - def starttls(self, keyfile=None, certfile=None, context=None): + def starttls(self, *, context=None): """Puts the connection to the SMTP server into TLS mode. If there has been no previous EHLO or HELO command this session, this method tries ESMTP EHLO first. If the server supports TLS, this will encrypt the rest of the SMTP - session. If you provide the keyfile and certfile parameters, + session. If you provide the context parameter, the identity of the SMTP server and client can be checked. This, however, depends on whether the socket module really checks the certificates. @@ -774,19 +774,8 @@ def starttls(self, keyfile=None, certfile=None, context=None): if resp == 220: if not _have_ssl: raise RuntimeError("No SSL support included in this Python") - if context is not None and keyfile is not None: - raise ValueError("context and keyfile arguments are mutually " - "exclusive") - if context is not None and certfile is not None: - raise ValueError("context and certfile arguments are mutually " - "exclusive") - if keyfile is not None or certfile is not None: - import warnings - warnings.warn("keyfile and certfile are deprecated, use a " - "custom context instead", DeprecationWarning, 2) if context is None: - context = ssl._create_stdlib_context(certfile=certfile, - keyfile=keyfile) + context = ssl._create_stdlib_context() self.sock = context.wrap_socket(self.sock, server_hostname=self._host) self.file = None @@ -1017,35 +1006,18 @@ class SMTP_SSL(SMTP): compiled with SSL support). If host is not specified, '' (the local host) is used. If port is omitted, the standard SMTP-over-SSL port (465) is used. local_hostname and source_address have the same meaning - as they do in the SMTP class. keyfile and certfile are also optional - - they can contain a PEM formatted private key and certificate chain file - for the SSL connection. context also optional, can contain a - SSLContext, and is an alternative to keyfile and certfile; If it is - specified both keyfile and certfile must be None. + as they do in the SMTP class. context also optional, can contain a + SSLContext. 
""" default_port = SMTP_SSL_PORT def __init__(self, host='', port=0, local_hostname=None, - keyfile=None, certfile=None, - timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + *, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None, context=None): - if context is not None and keyfile is not None: - raise ValueError("context and keyfile arguments are mutually " - "exclusive") - if context is not None and certfile is not None: - raise ValueError("context and certfile arguments are mutually " - "exclusive") - if keyfile is not None or certfile is not None: - import warnings - warnings.warn("keyfile and certfile are deprecated, use a " - "custom context instead", DeprecationWarning, 2) - self.keyfile = keyfile - self.certfile = certfile if context is None: - context = ssl._create_stdlib_context(certfile=certfile, - keyfile=keyfile) + context = ssl._create_stdlib_context() self.context = context SMTP.__init__(self, host, port, local_hostname, timeout, source_address) @@ -1127,10 +1099,7 @@ def prompt(prompt): toaddrs = prompt("To").split(',') print("Enter message, end with ^D:") msg = '' - while 1: - line = sys.stdin.readline() - if not line: - break + while line := sys.stdin.readline(): msg = msg + line print("Message length is %d" % len(msg)) diff --git a/Lib/socketserver.py b/Lib/socketserver.py index 30a5cfa59fe05b..842d526b011911 100644 --- a/Lib/socketserver.py +++ b/Lib/socketserver.py @@ -292,8 +292,7 @@ def handle_request(self): selector.register(self, selectors.EVENT_READ) while True: - ready = selector.select(timeout) - if ready: + if selector.select(timeout): return self._handle_request_noblock() else: if timeout is not None: diff --git a/Lib/statistics.py b/Lib/statistics.py index b4adabd3f05ae8..07d1fd5ba6e98e 100644 --- a/Lib/statistics.py +++ b/Lib/statistics.py @@ -1446,3 +1446,9 @@ def __hash__(self): def __repr__(self): return f'{type(self).__name__}(mu={self._mu!r}, sigma={self._sigma!r})' + + def __getstate__(self): + return self._mu, self._sigma + + def __setstate__(self, state): + self._mu, self._sigma = state diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py index 73c25684db20c9..c61100a6da8503 100644 --- a/Lib/sysconfig.py +++ b/Lib/sysconfig.py @@ -544,7 +544,12 @@ def _init_non_posix(vars): vars['LIBDEST'] = get_path('stdlib') vars['BINLIBDEST'] = get_path('platstdlib') vars['INCLUDEPY'] = get_path('include') - vars['EXT_SUFFIX'] = _imp.extension_suffixes()[0] + try: + # GH-99201: _imp.extension_suffixes may be empty when + # HAVE_DYNAMIC_LOADING is not set. In this case, don't set EXT_SUFFIX. 
+ vars['EXT_SUFFIX'] = _imp.extension_suffixes()[0] + except IndexError: + pass vars['EXE'] = '.exe' vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable)) diff --git a/Lib/tarfile.py b/Lib/tarfile.py index a08f247f496b3d..d686435d90ad1b 100755 --- a/Lib/tarfile.py +++ b/Lib/tarfile.py @@ -57,13 +57,9 @@ grp = None # os.symlink on Windows prior to 6.0 raises NotImplementedError -symlink_exception = (AttributeError, NotImplementedError) -try: - # OSError (winerror=1314) will be raised if the caller does not hold the - # SeCreateSymbolicLinkPrivilege privilege - symlink_exception += (OSError,) -except NameError: - pass +# OSError (winerror=1314) will be raised if the caller does not hold the +# SeCreateSymbolicLinkPrivilege privilege +symlink_exception = (AttributeError, NotImplementedError, OSError) # from tarfile import * __all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError", @@ -1266,11 +1262,7 @@ def _proc_pax(self, tarfile): # the newline. keyword and value are both UTF-8 encoded strings. regex = re.compile(br"(\d+) ([^=]+)=") pos = 0 - while True: - match = regex.match(buf, pos) - if not match: - break - + while match := regex.match(buf, pos): length, keyword = match.groups() length = int(length) if length == 0: @@ -2343,6 +2335,8 @@ def next(self): # Advance the file pointer. if self.offset != self.fileobj.tell(): + if self.offset == 0: + return None self.fileobj.seek(self.offset - 1) if not self.fileobj.read(1): raise ReadError("unexpected end of data") @@ -2420,10 +2414,8 @@ def _load(self): """Read through the entire archive file and look for readable members. """ - while True: - tarinfo = self.next() - if tarinfo is None: - break + while self.next() is not None: + pass self._loaded = True def _check(self, mode=None): diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index a66f4f5b897cd3..2fa75eb4d11311 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -6042,5 +6042,5 @@ def test_semlock_subclass(self): class SemLock(_multiprocessing.SemLock): pass name = f'test_semlock_subclass-{os.getpid()}' - s = SemLock(1, 0, 10, name, 0) + s = SemLock(1, 0, 10, name, False) _multiprocessing.sem_unlink(name) diff --git a/Lib/test/audit-tests.py b/Lib/test/audit-tests.py index 4abf33d7cad1b6..bf56cea541d121 100644 --- a/Lib/test/audit-tests.py +++ b/Lib/test/audit-tests.py @@ -419,6 +419,48 @@ def hook(event, args): sys._getframe() +def test_threading(): + import _thread + + def hook(event, args): + if event.startswith(("_thread.", "cpython.PyThreadState", "test.")): + print(event, args) + + sys.addaudithook(hook) + + lock = _thread.allocate_lock() + lock.acquire() + + class test_func: + def __repr__(self): return "<test_func>" + def __call__(self): + sys.audit("test.test_func") + lock.release() + + i = _thread.start_new_thread(test_func(), ()) + lock.acquire() + + +def test_threading_abort(): + # Ensures that aborting PyThreadState_New raises the correct exception + import _thread + + class ThreadNewAbortError(Exception): + pass + + def hook(event, args): + if event == "cpython.PyThreadState_New": + raise ThreadNewAbortError() + + sys.addaudithook(hook) + + try: + _thread.start_new_thread(lambda: None, ()) + except ThreadNewAbortError: + # Other exceptions are raised and the test will fail + pass + + def test_wmi_exec_query(): import _wmi @@ -450,6 +492,17 @@ def hook(event, args): syslog.closelog() +def test_not_in_gc(): + import gc + + hook = lambda 
*a: None + sys.addaudithook(hook) + + for o in gc.get_objects(): + if isinstance(o, list): + assert hook not in o + + if __name__ == "__main__": from test.support import suppress_msvcrt_asserts diff --git a/Lib/test/clinic.test b/Lib/test/clinic.test index 47e3e02490c816..0d844234d9d1f6 100644 --- a/Lib/test/clinic.test +++ b/Lib/test/clinic.test @@ -1740,29 +1740,18 @@ test_str_converter_encoding(PyObject *module, PyObject *const *args, Py_ssize_t goto exit; } return_value = test_str_converter_encoding_impl(module, a, b, c, d, d_length, e, e_length); + /* Post parse cleanup for a */ + PyMem_FREE(a); + /* Post parse cleanup for b */ + PyMem_FREE(b); + /* Post parse cleanup for c */ + PyMem_FREE(c); + /* Post parse cleanup for d */ + PyMem_FREE(d); + /* Post parse cleanup for e */ + PyMem_FREE(e); exit: - /* Cleanup for a */ - if (a) { - PyMem_FREE(a); - } - /* Cleanup for b */ - if (b) { - PyMem_FREE(b); - } - /* Cleanup for c */ - if (c) { - PyMem_FREE(c); - } - /* Cleanup for d */ - if (d) { - PyMem_FREE(d); - } - /* Cleanup for e */ - if (e) { - PyMem_FREE(e); - } - return return_value; } @@ -1770,7 +1759,7 @@ static PyObject * test_str_converter_encoding_impl(PyObject *module, char *a, char *b, char *c, char *d, Py_ssize_t d_length, char *e, Py_ssize_t e_length) -/*[clinic end generated code: output=8acb886a3843f3bc input=eb4c38e1f898f402]*/ +/*[clinic end generated code: output=999c1deecfa15b0a input=eb4c38e1f898f402]*/ /*[clinic input] @@ -3793,7 +3782,7 @@ test_vararg_and_posonly(PyObject *module, PyObject *const *args, Py_ssize_t narg a = args[0]; __clinic_args = PyTuple_New(nargs - 1); for (Py_ssize_t i = 0; i < nargs - 1; ++i) { - PyTuple_SET_ITEM(__clinic_args, i, args[1 + i]); + PyTuple_SET_ITEM(__clinic_args, i, Py_NewRef(args[1 + i])); } return_value = test_vararg_and_posonly_impl(module, a, __clinic_args); @@ -3804,7 +3793,7 @@ exit: static PyObject * test_vararg_and_posonly_impl(PyObject *module, PyObject *a, PyObject *args) -/*[clinic end generated code: output=548bca3a127c22c1 input=08dc2bf7afbf1613]*/ +/*[clinic end generated code: output=081a953b8cbe7617 input=08dc2bf7afbf1613]*/ /*[clinic input] test_vararg @@ -3856,7 +3845,6 @@ test_vararg(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject }; #undef KWTUPLE PyObject *argsbuf[2]; - Py_ssize_t noptargs = 0 + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; PyObject *a; PyObject *__clinic_args = NULL; @@ -3875,7 +3863,7 @@ exit: static PyObject * test_vararg_impl(PyObject *module, PyObject *a, PyObject *args) -/*[clinic end generated code: output=6661f3ca97d85e8c input=81d33815ad1bae6e]*/ +/*[clinic end generated code: output=880365c61ae205d7 input=81d33815ad1bae6e]*/ /*[clinic input] test_vararg_with_default @@ -3929,7 +3917,7 @@ test_vararg_with_default(PyObject *module, PyObject *const *args, Py_ssize_t nar }; #undef KWTUPLE PyObject *argsbuf[3]; - Py_ssize_t noptargs = 0 + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; + Py_ssize_t noptargs = Py_MIN(nargs, 1) + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; PyObject *a; PyObject *__clinic_args = NULL; int b = 0; @@ -3958,7 +3946,7 @@ exit: static PyObject * test_vararg_with_default_impl(PyObject *module, PyObject *a, PyObject *args, int b) -/*[clinic end generated code: output=5fe3cfccb1bef781 input=6e110b54acd9b22d]*/ +/*[clinic end generated code: output=291e9a5a09831128 input=6e110b54acd9b22d]*/ /*[clinic input] test_vararg_with_only_defaults diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py index bba96698e9e2eb..121d973b6d5f20 100644 --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -1489,6 +1489,9 @@ def test_strftime(self): #check that this standard extension works t.strftime("%f") + # bpo-41260: The parameter was named "fmt" in the pure python impl. + t.strftime(format="%f") + def test_strftime_trailing_percent(self): # bpo-35066: Make sure trailing '%' doesn't cause datetime's strftime to # complain. Different libcs have different handling of trailing diff --git a/Lib/test/inspect_fodder2.py b/Lib/test/inspect_fodder2.py index e7d4b53ebefcc6..2dc49817087c44 100644 --- a/Lib/test/inspect_fodder2.py +++ b/Lib/test/inspect_fodder2.py @@ -259,3 +259,17 @@ def all_markers_with_args_and_kwargs(a, b, /, c, d, *args, e, f, **kwargs): #line 259 def all_markers_with_defaults(a, b=1, /, c=2, d=3, *, e=4, f=5): pass + +# line 263 +def deco_factory(**kwargs): + def deco(f): + @wraps(f) + def wrapper(*a, **kwd): + kwd.update(kwargs) + return f(*a, **kwd) + return wrapper + return deco + +@deco_factory(foo=(1 + 2), bar=lambda: 1) +def complex_decorated(foo=0, bar=lambda: 0): + return foo + bar() diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py index 655e4d2e56f8f0..19ccf2db5e7f06 100644 --- a/Lib/test/libregrtest/main.py +++ b/Lib/test/libregrtest/main.py @@ -17,7 +17,8 @@ ChildError, DidNotRun) from test.libregrtest.setup import setup_tests from test.libregrtest.pgo import setup_pgo_tests -from test.libregrtest.utils import removepy, count, format_duration, printlist +from test.libregrtest.utils import (removepy, count, format_duration, + printlist, get_build_info) from test import support from test.support import os_helper from test.support import threading_helper @@ -28,6 +29,11 @@ # Must be smaller than buildbot "1200 seconds without output" limit. EXIT_TIMEOUT = 120.0 +EXITCODE_BAD_TEST = 2 +EXITCODE_INTERRUPTED = 130 +EXITCODE_ENV_CHANGED = 3 +EXITCODE_NO_TESTS_RAN = 4 + class Regrtest: """Execute a test suite. 
@@ -486,6 +492,7 @@ def display_header(self): print("==", platform.python_implementation(), *sys.version.split()) print("==", platform.platform(aliased=True), "%s-endian" % sys.byteorder) + print("== Python build:", ' '.join(get_build_info())) print("== cwd:", os.getcwd()) cpu_count = os.cpu_count() if cpu_count: @@ -493,15 +500,18 @@ def display_header(self): print("== encodings: locale=%s, FS=%s" % (locale.getencoding(), sys.getfilesystemencoding())) + def no_tests_run(self): + return not any((self.good, self.bad, self.skipped, self.interrupted, + self.environment_changed)) + def get_tests_result(self): result = [] if self.bad: result.append("FAILURE") elif self.ns.fail_env_changed and self.environment_changed: result.append("ENV CHANGED") - elif not any((self.good, self.bad, self.skipped, self.interrupted, - self.environment_changed)): - result.append("NO TEST RUN") + elif self.no_tests_run(): + result.append("NO TESTS RAN") if self.interrupted: result.append("INTERRUPTED") @@ -750,11 +760,13 @@ def _main(self, tests, kwargs): self.save_xml_result() if self.bad: - sys.exit(2) + sys.exit(EXITCODE_BAD_TEST) if self.interrupted: - sys.exit(130) + sys.exit(EXITCODE_INTERRUPTED) if self.ns.fail_env_changed and self.environment_changed: - sys.exit(3) + sys.exit(EXITCODE_ENV_CHANGED) + if self.no_tests_run(): + sys.exit(EXITCODE_NO_TESTS_RAN) sys.exit(0) diff --git a/Lib/test/libregrtest/refleak.py b/Lib/test/libregrtest/refleak.py index a0538cbb3c3772..4298fa806e1065 100644 --- a/Lib/test/libregrtest/refleak.py +++ b/Lib/test/libregrtest/refleak.py @@ -73,7 +73,6 @@ def get_pooled_int(value): fd_deltas = [0] * repcount getallocatedblocks = sys.getallocatedblocks gettotalrefcount = sys.gettotalrefcount - _getquickenedcount = sys._getquickenedcount fd_count = os_helper.fd_count # initialize variables to make pyflakes quiet rc_before = alloc_before = fd_before = 0 @@ -93,7 +92,7 @@ def get_pooled_int(value): support.gc_collect() # Read memory statistics immediately after the garbage collection - alloc_after = getallocatedblocks() - _getquickenedcount() + alloc_after = getallocatedblocks() rc_after = gettotalrefcount() fd_after = fd_count() diff --git a/Lib/test/libregrtest/save_env.py b/Lib/test/libregrtest/save_env.py index 60c9be24617a65..cc5870ab2b833e 100644 --- a/Lib/test/libregrtest/save_env.py +++ b/Lib/test/libregrtest/save_env.py @@ -161,11 +161,11 @@ def restore_warnings_filters(self, saved_filters): warnings.filters[:] = saved_filters[2] def get_asyncore_socket_map(self): - asyncore = sys.modules.get('asyncore') + asyncore = sys.modules.get('test.support.asyncore') # XXX Making a copy keeps objects alive until __exit__ gets called. return asyncore and asyncore.socket_map.copy() or {} def restore_asyncore_socket_map(self, saved_map): - asyncore = sys.modules.get('asyncore') + asyncore = sys.modules.get('test.support.asyncore') if asyncore is not None: asyncore.close_all(ignore_all=True) asyncore.socket_map.update(saved_map) diff --git a/Lib/test/libregrtest/utils.py b/Lib/test/libregrtest/utils.py index 332dcc4c6db247..fb13fa0e243ba7 100644 --- a/Lib/test/libregrtest/utils.py +++ b/Lib/test/libregrtest/utils.py @@ -1,6 +1,7 @@ import math import os.path import sys +import sysconfig import textwrap from test import support @@ -124,15 +125,6 @@ def clear_caches(): if stream is not None: stream.flush() - # Clear assorted module caches. 
- # Don't worry about resetting the cache if the module is not loaded - try: - distutils_dir_util = sys.modules['distutils.dir_util'] - except KeyError: - pass - else: - distutils_dir_util._path_created.clear() - try: re = sys.modules['re'] except KeyError: @@ -217,3 +209,87 @@ def clear_caches(): pass else: fractions._hash_algorithm.cache_clear() + + +def get_build_info(): + # Get most important configure and build options as a list of strings. + # Example: ['debug', 'ASAN+MSAN'] or ['release', 'LTO+PGO']. + + config_args = sysconfig.get_config_var('CONFIG_ARGS') or '' + cflags = sysconfig.get_config_var('PY_CFLAGS') or '' + cflags_nodist = sysconfig.get_config_var('PY_CFLAGS_NODIST') or '' + ldflags_nodist = sysconfig.get_config_var('PY_LDFLAGS_NODIST') or '' + + build = [] + if hasattr(sys, 'gettotalrefcount'): + # --with-pydebug + build.append('debug') + + if '-DNDEBUG' in (cflags + cflags_nodist): + build.append('without_assert') + else: + build.append('release') + + if '--with-assertions' in config_args: + build.append('with_assert') + elif '-DNDEBUG' not in (cflags + cflags_nodist): + build.append('with_assert') + + # --enable-framework=name + framework = sysconfig.get_config_var('PYTHONFRAMEWORK') + if framework: + build.append(f'framework={framework}') + + # --enable-shared + shared = int(sysconfig.get_config_var('PY_ENABLE_SHARED') or '0') + if shared: + build.append('shared') + + # --with-lto + optimizations = [] + if '-flto=thin' in ldflags_nodist: + optimizations.append('ThinLTO') + elif '-flto' in ldflags_nodist: + optimizations.append('LTO') + + # --enable-optimizations + pgo_options = ( + # GCC + '-fprofile-use', + # clang: -fprofile-instr-use=code.profclangd + '-fprofile-instr-use', + # ICC + "-prof-use", + ) + if any(option in cflags_nodist for option in pgo_options): + optimizations.append('PGO') + if optimizations: + build.append('+'.join(optimizations)) + + # --with-address-sanitizer + sanitizers = [] + if support.check_sanitizer(address=True): + sanitizers.append("ASAN") + # --with-memory-sanitizer + if support.check_sanitizer(memory=True): + sanitizers.append("MSAN") + # --with-undefined-behavior-sanitizer + if support.check_sanitizer(ub=True): + sanitizers.append("UBSAN") + if sanitizers: + build.append('+'.join(sanitizers)) + + # --with-trace-refs + if hasattr(sys, 'getobjects'): + build.append("TraceRefs") + # --enable-pystats + if hasattr(sys, '_stats_on'): + build.append("pystats") + # --with-valgrind + if sysconfig.get_config_var('WITH_VALGRIND'): + build.append("valgrind") + # --with-dtrace + if sysconfig.get_config_var('WITH_DTRACE'): + build.append("dtrace") + + return build diff --git a/Lib/test/pydocfodder.py b/Lib/test/pydocfodder.py index 2530320a222726..d0750e5a43c0c0 100644 --- a/Lib/test/pydocfodder.py +++ b/Lib/test/pydocfodder.py @@ -2,85 +2,7 @@ import types -class A_classic: - "A classic class." - def A_method(self): - "Method defined in A." - def AB_method(self): - "Method defined in A and B." - def AC_method(self): - "Method defined in A and C." - def AD_method(self): - "Method defined in A and D." - def ABC_method(self): - "Method defined in A, B and C." - def ABD_method(self): - "Method defined in A, B and D." - def ACD_method(self): - "Method defined in A, C and D." - def ABCD_method(self): - "Method defined in A, B, C and D." - - -class B_classic(A_classic): - "A classic class, derived from A_classic." - def AB_method(self): - "Method defined in A and B." - def ABC_method(self): - "Method defined in A, B and C." 
- def ABD_method(self): - "Method defined in A, B and D." - def ABCD_method(self): - "Method defined in A, B, C and D." - def B_method(self): - "Method defined in B." - def BC_method(self): - "Method defined in B and C." - def BD_method(self): - "Method defined in B and D." - def BCD_method(self): - "Method defined in B, C and D." - -class C_classic(A_classic): - "A classic class, derived from A_classic." - def AC_method(self): - "Method defined in A and C." - def ABC_method(self): - "Method defined in A, B and C." - def ACD_method(self): - "Method defined in A, C and D." - def ABCD_method(self): - "Method defined in A, B, C and D." - def BC_method(self): - "Method defined in B and C." - def BCD_method(self): - "Method defined in B, C and D." - def C_method(self): - "Method defined in C." - def CD_method(self): - "Method defined in C and D." - -class D_classic(B_classic, C_classic): - "A classic class, derived from B_classic and C_classic." - def AD_method(self): - "Method defined in A and D." - def ABD_method(self): - "Method defined in A, B and D." - def ACD_method(self): - "Method defined in A, C and D." - def ABCD_method(self): - "Method defined in A, B, C and D." - def BD_method(self): - "Method defined in B and D." - def BCD_method(self): - "Method defined in B, C and D." - def CD_method(self): - "Method defined in C and D." - def D_method(self): - "Method defined in D." - - -class A_new(object): +class A_new: "A new-style class." def A_method(self): diff --git a/Lib/test/smtpd.py b/Lib/test/smtpd.py index f9d4b048a83f68..6052232ec2b585 100755 --- a/Lib/test/smtpd.py +++ b/Lib/test/smtpd.py @@ -77,7 +77,7 @@ import time import socket import collections -from test.support.import_helper import import_module +from test.support import asyncore, asynchat from warnings import warn from email._header_value_parser import get_addr_spec, get_angle_addr @@ -85,9 +85,6 @@ "SMTPChannel", "SMTPServer", "DebuggingServer", "PureProxy", ] -asyncore = import_module('asyncore', deprecated=True) -asynchat = import_module('asynchat', deprecated=True) - program = sys.argv[0] __version__ = 'Python SMTP proxy version 0.3' diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 1dee708deb8c09..a631bfc80cfaf0 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -2097,7 +2097,7 @@ def wait_process(pid, *, exitcode, timeout=None): Raise an AssertionError if the process exit code is not equal to exitcode. - If the process runs longer than timeout seconds (SHORT_TIMEOUT by default), + If the process runs longer than timeout seconds (LONG_TIMEOUT by default), kill the process (if signal.SIGKILL is available) and raise an AssertionError. The timeout feature is not available on Windows. """ @@ -2105,7 +2105,7 @@ def wait_process(pid, *, exitcode, timeout=None): import signal if timeout is None: - timeout = SHORT_TIMEOUT + timeout = LONG_TIMEOUT start_time = time.monotonic() for _ in sleeping_retry(timeout, error=False): diff --git a/Lib/asynchat.py b/Lib/test/support/asynchat.py similarity index 97% rename from Lib/asynchat.py rename to Lib/test/support/asynchat.py index bed797e989e136..38c47a1fda683e 100644 --- a/Lib/asynchat.py +++ b/Lib/test/support/asynchat.py @@ -1,3 +1,8 @@ +# TODO: This module was deprecated and removed from CPython 3.12 +# Now it is a test-only helper. Any attempts to rewrite exising tests that +# are using this module and remove it completely are appreciated! 
+# See: https://github.com/python/cpython/issues/72719 + # -*- Mode: Python; tab-width: 4 -*- # Id: asynchat.py,v 2.26 2000/09/07 22:29:26 rushing Exp # Author: Sam Rushing <rushing@nightmare.com> @@ -45,15 +50,10 @@ method) up to the terminator, and then control will be returned to you - by calling your self.found_terminator() method. """ -import asyncore -from collections import deque -from warnings import _deprecated - -_DEPRECATION_MSG = ('The {name} module is deprecated and will be removed in ' - 'Python {remove}. The recommended replacement is asyncio') -_deprecated(__name__, _DEPRECATION_MSG, remove=(3, 12)) +from collections import deque +from test.support import asyncore class async_chat(asyncore.dispatcher): diff --git a/Lib/asyncore.py b/Lib/test/support/asyncore.py similarity index 98% rename from Lib/asyncore.py rename to Lib/test/support/asyncore.py index 57c86871f3dcf0..401fa60bcf35f2 100644 --- a/Lib/asyncore.py +++ b/Lib/test/support/asyncore.py @@ -1,3 +1,8 @@ +# TODO: This module was deprecated and removed from CPython 3.12 +# Now it is a test-only helper. Any attempts to rewrite exising tests that +# are using this module and remove it completely are appreciated! +# See: https://github.com/python/cpython/issues/72719 + # -*- Mode: Python -*- # Id: asyncore.py,v 2.51 2000/09/07 22:29:26 rushing Exp # Author: Sam Rushing <rushing@nightmare.com> @@ -57,10 +62,6 @@ ENOTCONN, ESHUTDOWN, EISCONN, EBADF, ECONNABORTED, EPIPE, EAGAIN, \ errorcode -_DEPRECATION_MSG = ('The {name} module is deprecated and will be removed in ' - 'Python {remove}. The recommended replacement is asyncio') -warnings._deprecated(__name__, _DEPRECATION_MSG, remove=(3, 12)) - _DISCONNECTED = frozenset({ECONNRESET, ENOTCONN, ESHUTDOWN, ECONNABORTED, EPIPE, EBADF}) diff --git a/Lib/test/support/bytecode_helper.py b/Lib/test/support/bytecode_helper.py index eb4ae1a68e3fc1..65ae7a227baafe 100644 --- a/Lib/test/support/bytecode_helper.py +++ b/Lib/test/support/bytecode_helper.py @@ -3,7 +3,7 @@ import unittest import dis import io -from _testinternalcapi import optimize_cfg +from _testinternalcapi import compiler_codegen, optimize_cfg _UNSPECIFIED = object() @@ -44,8 +44,7 @@ def assertNotInBytecode(self, x, opname, argval=_UNSPECIFIED): msg = msg % (opname, argval, disassembly) self.fail(msg) - -class CfgOptimizationTestCase(unittest.TestCase): +class CompilationStepTestCase(unittest.TestCase): HAS_ARG = set(dis.hasarg) HAS_TARGET = set(dis.hasjrel + dis.hasjabs + dis.hasexc) @@ -58,24 +57,35 @@ def Label(self): self.last_label += 1 return self.last_label - def complete_insts_info(self, insts): - # fill in omitted fields in location, and oparg 0 for ops with no arg. - instructions = [] - for item in insts: - if isinstance(item, int): - instructions.append(item) - else: - assert isinstance(item, tuple) - inst = list(reversed(item)) - opcode = dis.opmap[inst.pop()] - oparg = inst.pop() if opcode in self.HAS_ARG_OR_TARGET else 0 - loc = inst + [-1] * (4 - len(inst)) - instructions.append((opcode, oparg, *loc)) - return instructions + def assertInstructionsMatch(self, actual_, expected_): + # get two lists where each entry is a label or + # an instruction tuple. Compare them, while mapping + # each actual label to a corresponding expected label + # based on their locations. 
+ + self.assertIsInstance(actual_, list) + self.assertIsInstance(expected_, list) + + actual = self.normalize_insts(actual_) + expected = self.normalize_insts(expected_) + self.assertEqual(len(actual), len(expected)) + + # compare instructions + for act, exp in zip(actual, expected): + if isinstance(act, int): + self.assertEqual(exp, act) + continue + self.assertIsInstance(exp, tuple) + self.assertIsInstance(act, tuple) + # crop comparison to the provided expected values + if len(act) > len(exp): + act = act[:len(exp)] + self.assertEqual(exp, act) def normalize_insts(self, insts): """ Map labels to instruction index. Remove labels which are not used as jump targets. + Map opcodes to opnames. """ labels_map = {} targets = set() @@ -107,31 +117,32 @@ def normalize_insts(self, insts): res.append((opcode, arg, *loc)) return res - def get_optimized(self, insts, consts): - insts = self.complete_insts_info(insts) - insts = optimize_cfg(insts, consts) - return insts, consts - def compareInstructions(self, actual_, expected_): - # get two lists where each entry is a label or - # an instruction tuple. Compare them, while mapping - # each actual label to a corresponding expected label - # based on their locations. +class CodegenTestCase(CompilationStepTestCase): - self.assertIsInstance(actual_, list) - self.assertIsInstance(expected_, list) + def generate_code(self, ast): + insts = compiler_codegen(ast, "my_file.py", 0) + return insts - actual = self.normalize_insts(actual_) - expected = self.normalize_insts(expected_) - self.assertEqual(len(actual), len(expected)) - # compare instructions - for act, exp in zip(actual, expected): - if isinstance(act, int): - self.assertEqual(exp, act) - continue - self.assertIsInstance(exp, tuple) - self.assertIsInstance(act, tuple) - # pad exp with -1's (if location info is incomplete) - exp += (-1,) * (len(act) - len(exp)) - self.assertEqual(exp, act) +class CfgOptimizationTestCase(CompilationStepTestCase): + + def complete_insts_info(self, insts): + # fill in omitted fields in location, and oparg 0 for ops with no arg. 
+ instructions = [] + for item in insts: + if isinstance(item, int): + instructions.append(item) + else: + assert isinstance(item, tuple) + inst = list(reversed(item)) + opcode = dis.opmap[inst.pop()] + oparg = inst.pop() if opcode in self.HAS_ARG_OR_TARGET else 0 + loc = inst + [-1] * (4 - len(inst)) + instructions.append((opcode, oparg, *loc)) + return instructions + + def get_optimized(self, insts, consts): + insts = self.complete_insts_info(insts) + insts = optimize_cfg(insts, consts) + return insts, consts diff --git a/Lib/test/test__opcode.py b/Lib/test/test__opcode.py index 704d19fffd09e6..db831069c7aeb8 100644 --- a/Lib/test/test__opcode.py +++ b/Lib/test/test__opcode.py @@ -72,9 +72,10 @@ def test_specialization_stats(self): stat_names = opcode._specialization_stats specialized_opcodes = [ - op[:-len("_ADAPTIVE")].lower() for - op in opcode._specialized_instructions - if op.endswith("_ADAPTIVE")] + op.lower() + for op in opcode._specializations + if opcode._inline_cache_entries[opcode.opmap[op]] + ] self.assertIn('load_attr', specialized_opcodes) self.assertIn('binary_subscr', specialized_opcodes) diff --git a/Lib/test/test__xxsubinterpreters.py b/Lib/test/test__xxsubinterpreters.py index 66f29b95af10c3..18900bb9f7162c 100644 --- a/Lib/test/test__xxsubinterpreters.py +++ b/Lib/test/test__xxsubinterpreters.py @@ -295,8 +295,8 @@ def clean_up_channels(): class TestBase(unittest.TestCase): def tearDown(self): - clean_up_interpreters() clean_up_channels() + clean_up_interpreters() ################################## @@ -386,7 +386,6 @@ def test_types(self): self._assert_values([ b'spam', 9999, - self.cid, ]) def test_bytes(self): @@ -412,6 +411,15 @@ def test_non_shareable_int(self): interpreters.channel_send(self.cid, i) +class ModuleTests(TestBase): + + def test_import_in_interpreter(self): + _run_output( + interpreters.create(), + 'import _xxsubinterpreters as _interpreters', + ) + + ################################## # interpreter tests @@ -1213,6 +1221,18 @@ def test_equality(self): self.assertFalse(cid1 != cid2) self.assertTrue(cid1 != cid3) + def test_shareable(self): + chan = interpreters.channel_create() + + obj = interpreters.channel_create() + interpreters.channel_send(chan, obj) + got = interpreters.channel_recv(chan) + + self.assertEqual(got, obj) + self.assertIs(type(got), type(obj)) + # XXX Check the following in the channel tests? 
+ #self.assertIsNot(got, obj) + class ChannelTests(TestBase): @@ -1545,6 +1565,19 @@ def test_recv_default(self): self.assertEqual(obj5, b'eggs') self.assertIs(obj6, default) + def test_recv_sending_interp_destroyed(self): + cid = interpreters.channel_create() + interp = interpreters.create() + interpreters.run_string(interp, dedent(f""" + import _xxsubinterpreters as _interpreters + _interpreters.channel_send({cid}, b'spam') + """)) + interpreters.destroy(interp) + + with self.assertRaisesRegex(RuntimeError, + 'unrecognized interpreter ID'): + interpreters.channel_recv(cid) + def test_run_string_arg_unresolved(self): cid = interpreters.channel_create() interp = interpreters.create() diff --git a/Lib/test/test_abc.py b/Lib/test/test_abc.py index a083236fb0fc44..86f31a9acb4d55 100644 --- a/Lib/test/test_abc.py +++ b/Lib/test/test_abc.py @@ -154,7 +154,7 @@ class C(metaclass=abc_ABCMeta): @abc.abstractmethod def method_one(self): pass - msg = r"class C without an implementation for abstract method method_one" + msg = r"class C without an implementation for abstract method 'method_one'" self.assertRaisesRegex(TypeError, msg, C) def test_object_new_with_many_abstractmethods(self): @@ -165,7 +165,7 @@ def method_one(self): @abc.abstractmethod def method_two(self): pass - msg = r"class C without an implementation for abstract methods method_one, method_two" + msg = r"class C without an implementation for abstract methods 'method_one', 'method_two'" self.assertRaisesRegex(TypeError, msg, C) def test_abstractmethod_integration(self): @@ -535,7 +535,7 @@ def updated_foo(self): A.foo = updated_foo abc.update_abstractmethods(A) self.assertEqual(A.__abstractmethods__, {'foo', 'bar'}) - msg = "class A without an implementation for abstract methods bar, foo" + msg = "class A without an implementation for abstract methods 'bar', 'foo'" self.assertRaisesRegex(TypeError, msg, A) def test_update_implementation(self): @@ -547,7 +547,7 @@ def foo(self): class B(A): pass - msg = "class B without an implementation for abstract method foo" + msg = "class B without an implementation for abstract method 'foo'" self.assertRaisesRegex(TypeError, msg, B) self.assertEqual(B.__abstractmethods__, {'foo'}) @@ -605,7 +605,7 @@ def foo(self): abc.update_abstractmethods(B) - msg = "class B without an implementation for abstract method foo" + msg = "class B without an implementation for abstract method 'foo'" self.assertRaisesRegex(TypeError, msg, B) def test_update_layered_implementation(self): @@ -627,7 +627,7 @@ def foo(self): abc.update_abstractmethods(C) - msg = "class C without an implementation for abstract method foo" + msg = "class C without an implementation for abstract method 'foo'" self.assertRaisesRegex(TypeError, msg, C) def test_update_multi_inheritance(self): diff --git a/Lib/test/test_argparse.py b/Lib/test/test_argparse.py index 2b7f008d38564b..cabb2f837693ff 100644 --- a/Lib/test/test_argparse.py +++ b/Lib/test/test_argparse.py @@ -296,7 +296,7 @@ class TestOptionalsSingleDashCombined(ParserTestCase): Sig('-z'), ] failures = ['a', '--foo', '-xa', '-x --foo', '-x -z', '-z -x', - '-yx', '-yz a', '-yyyx', '-yyyza', '-xyza'] + '-yx', '-yz a', '-yyyx', '-yyyza', '-xyza', '-x='] successes = [ ('', NS(x=False, yyy=None, z=None)), ('-x', NS(x=True, yyy=None, z=None)), diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index b34644118d2815..ab6a63faa59085 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -837,7 +837,8 @@ def check_limit(prefix, repeated): details = "Compiling ({!r} + {!r} * 
{})".format( prefix, repeated, depth) with self.assertRaises(RecursionError, msg=details): - ast.parse(broken) + with support.infinite_recursion(): + ast.parse(broken) check_limit("a", "()") check_limit("a", ".b") @@ -1036,6 +1037,18 @@ def test_increment_lineno(self): self.assertEqual(ast.increment_lineno(src).lineno, 2) self.assertIsNone(ast.increment_lineno(src).end_lineno) + def test_increment_lineno_on_module(self): + src = ast.parse(dedent("""\ + a = 1 + b = 2 # type: ignore + c = 3 + d = 4 # type: ignore@tag + """), type_comments=True) + ast.increment_lineno(src, n=5) + self.assertEqual(src.type_ignores[0].lineno, 7) + self.assertEqual(src.type_ignores[1].lineno, 9) + self.assertEqual(src.type_ignores[1].tag, '@tag') + def test_iter_fields(self): node = ast.parse('foo()', mode='eval') d = dict(ast.iter_fields(node.body)) diff --git a/Lib/test/test_asyncgen.py b/Lib/test/test_asyncgen.py index f6184c0cab4694..0421efdbf9dac9 100644 --- a/Lib/test/test_asyncgen.py +++ b/Lib/test/test_asyncgen.py @@ -378,6 +378,19 @@ async def async_gen_wrapper(): self.compare_generators(sync_gen_wrapper(), async_gen_wrapper()) + def test_async_gen_exception_12(self): + async def gen(): + await anext(me) + yield 123 + + me = gen() + ai = me.__aiter__() + an = ai.__anext__() + + with self.assertRaisesRegex(RuntimeError, + r'anext\(\): asynchronous generator is already running'): + an.__next__() + def test_async_gen_3_arg_deprecation_warning(self): async def gen(): yield 123 diff --git a/Lib/test/test_asynchat.py b/Lib/test/test_asynchat.py deleted file mode 100644 index 9d08bd02968fd6..00000000000000 --- a/Lib/test/test_asynchat.py +++ /dev/null @@ -1,293 +0,0 @@ -# test asynchat - -from test import support -from test.support import socket_helper -from test.support import threading_helper -from test.support import warnings_helper - -import errno -import socket -import sys -import threading -import time -import unittest -import unittest.mock - - -asynchat = warnings_helper.import_deprecated('asynchat') -asyncore = warnings_helper.import_deprecated('asyncore') - -support.requires_working_socket(module=True) - -HOST = socket_helper.HOST -SERVER_QUIT = b'QUIT\n' - - -class echo_server(threading.Thread): - # parameter to determine the number of bytes passed back to the - # client each send - chunk_size = 1 - - def __init__(self, event): - threading.Thread.__init__(self) - self.event = event - self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self.port = socket_helper.bind_port(self.sock) - # This will be set if the client wants us to wait before echoing - # data back. 
- self.start_resend_event = None - - def run(self): - self.sock.listen() - self.event.set() - conn, client = self.sock.accept() - self.buffer = b"" - # collect data until quit message is seen - while SERVER_QUIT not in self.buffer: - data = conn.recv(1) - if not data: - break - self.buffer = self.buffer + data - - # remove the SERVER_QUIT message - self.buffer = self.buffer.replace(SERVER_QUIT, b'') - - if self.start_resend_event: - self.start_resend_event.wait() - - # re-send entire set of collected data - try: - # this may fail on some tests, such as test_close_when_done, - # since the client closes the channel when it's done sending - while self.buffer: - n = conn.send(self.buffer[:self.chunk_size]) - time.sleep(0.001) - self.buffer = self.buffer[n:] - except: - pass - - conn.close() - self.sock.close() - -class echo_client(asynchat.async_chat): - - def __init__(self, terminator, server_port): - asynchat.async_chat.__init__(self) - self.contents = [] - self.create_socket(socket.AF_INET, socket.SOCK_STREAM) - self.connect((HOST, server_port)) - self.set_terminator(terminator) - self.buffer = b"" - - def handle_connect(self): - pass - - if sys.platform == 'darwin': - # select.poll returns a select.POLLHUP at the end of the tests - # on darwin, so just ignore it - def handle_expt(self): - pass - - def collect_incoming_data(self, data): - self.buffer += data - - def found_terminator(self): - self.contents.append(self.buffer) - self.buffer = b"" - -def start_echo_server(): - event = threading.Event() - s = echo_server(event) - s.start() - event.wait() - event.clear() - time.sleep(0.01) # Give server time to start accepting. - return s, event - - -class TestAsynchat(unittest.TestCase): - usepoll = False - - def setUp(self): - self._threads = threading_helper.threading_setup() - - def tearDown(self): - threading_helper.threading_cleanup(*self._threads) - - def line_terminator_check(self, term, server_chunk): - event = threading.Event() - s = echo_server(event) - s.chunk_size = server_chunk - s.start() - event.wait() - event.clear() - time.sleep(0.01) # Give server time to start accepting. - c = echo_client(term, s.port) - c.push(b"hello ") - c.push(b"world" + term) - c.push(b"I'm not dead yet!" 
+ term) - c.push(SERVER_QUIT) - asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - threading_helper.join_thread(s) - - self.assertEqual(c.contents, [b"hello world", b"I'm not dead yet!"]) - - # the line terminator tests below check receiving variously-sized - # chunks back from the server in order to exercise all branches of - # async_chat.handle_read - - def test_line_terminator1(self): - # test one-character terminator - for l in (1, 2, 3): - self.line_terminator_check(b'\n', l) - - def test_line_terminator2(self): - # test two-character terminator - for l in (1, 2, 3): - self.line_terminator_check(b'\r\n', l) - - def test_line_terminator3(self): - # test three-character terminator - for l in (1, 2, 3): - self.line_terminator_check(b'qqq', l) - - def numeric_terminator_check(self, termlen): - # Try reading a fixed number of bytes - s, event = start_echo_server() - c = echo_client(termlen, s.port) - data = b"hello world, I'm not dead yet!\n" - c.push(data) - c.push(SERVER_QUIT) - asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - threading_helper.join_thread(s) - - self.assertEqual(c.contents, [data[:termlen]]) - - def test_numeric_terminator1(self): - # check that ints & longs both work (since type is - # explicitly checked in async_chat.handle_read) - self.numeric_terminator_check(1) - - def test_numeric_terminator2(self): - self.numeric_terminator_check(6) - - def test_none_terminator(self): - # Try reading a fixed number of bytes - s, event = start_echo_server() - c = echo_client(None, s.port) - data = b"hello world, I'm not dead yet!\n" - c.push(data) - c.push(SERVER_QUIT) - asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - threading_helper.join_thread(s) - - self.assertEqual(c.contents, []) - self.assertEqual(c.buffer, data) - - def test_simple_producer(self): - s, event = start_echo_server() - c = echo_client(b'\n', s.port) - data = b"hello world\nI'm not dead yet!\n" - p = asynchat.simple_producer(data+SERVER_QUIT, buffer_size=8) - c.push_with_producer(p) - asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - threading_helper.join_thread(s) - - self.assertEqual(c.contents, [b"hello world", b"I'm not dead yet!"]) - - def test_string_producer(self): - s, event = start_echo_server() - c = echo_client(b'\n', s.port) - data = b"hello world\nI'm not dead yet!\n" - c.push_with_producer(data+SERVER_QUIT) - asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - threading_helper.join_thread(s) - - self.assertEqual(c.contents, [b"hello world", b"I'm not dead yet!"]) - - def test_empty_line(self): - # checks that empty lines are handled correctly - s, event = start_echo_server() - c = echo_client(b'\n', s.port) - c.push(b"hello world\n\nI'm not dead yet!\n") - c.push(SERVER_QUIT) - asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - threading_helper.join_thread(s) - - self.assertEqual(c.contents, - [b"hello world", b"", b"I'm not dead yet!"]) - - def test_close_when_done(self): - s, event = start_echo_server() - s.start_resend_event = threading.Event() - c = echo_client(b'\n', s.port) - c.push(b"hello world\nI'm not dead yet!\n") - c.push(SERVER_QUIT) - c.close_when_done() - asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - - # Only allow the server to start echoing data back to the client after - # the client has closed its connection. This prevents a race condition - # where the server echoes all of its data before we can check that it - # got any down below. 
- s.start_resend_event.set() - threading_helper.join_thread(s) - - self.assertEqual(c.contents, []) - # the server might have been able to send a byte or two back, but this - # at least checks that it received something and didn't just fail - # (which could still result in the client not having received anything) - self.assertGreater(len(s.buffer), 0) - - def test_push(self): - # Issue #12523: push() should raise a TypeError if it doesn't get - # a bytes string - s, event = start_echo_server() - c = echo_client(b'\n', s.port) - data = b'bytes\n' - c.push(data) - c.push(bytearray(data)) - c.push(memoryview(data)) - self.assertRaises(TypeError, c.push, 10) - self.assertRaises(TypeError, c.push, 'unicode') - c.push(SERVER_QUIT) - asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - threading_helper.join_thread(s) - self.assertEqual(c.contents, [b'bytes', b'bytes', b'bytes']) - - -class TestAsynchat_WithPoll(TestAsynchat): - usepoll = True - - -class TestAsynchatMocked(unittest.TestCase): - def test_blockingioerror(self): - # Issue #16133: handle_read() must ignore BlockingIOError - sock = unittest.mock.Mock() - sock.recv.side_effect = BlockingIOError(errno.EAGAIN) - - dispatcher = asynchat.async_chat() - dispatcher.set_socket(sock) - self.addCleanup(dispatcher.del_channel) - - with unittest.mock.patch.object(dispatcher, 'handle_error') as error: - dispatcher.handle_read() - self.assertFalse(error.called) - - -class TestHelperFunctions(unittest.TestCase): - def test_find_prefix_at_end(self): - self.assertEqual(asynchat.find_prefix_at_end("qwerty\r", "\r\n"), 1) - self.assertEqual(asynchat.find_prefix_at_end("qwertydkjf", "\r\n"), 0) - - -class TestNotConnected(unittest.TestCase): - def test_disallow_negative_terminator(self): - # Issue #11259 - client = asynchat.async_chat() - self.assertRaises(ValueError, client.set_terminator, -1) - - - -if __name__ == "__main__": - unittest.main() diff --git a/Lib/test/test_asyncio/test_base_events.py b/Lib/test/test_asyncio/test_base_events.py index 2dcb20c1cec7f9..3b4026cb73869a 100644 --- a/Lib/test/test_asyncio/test_base_events.py +++ b/Lib/test/test_asyncio/test_base_events.py @@ -746,7 +746,7 @@ async def coro(): def test_env_var_debug(self): code = '\n'.join(( 'import asyncio', - 'loop = asyncio.get_event_loop()', + 'loop = asyncio.new_event_loop()', 'print(loop.get_debug())')) # Test with -E to not fail if the unit test was run with @@ -861,20 +861,15 @@ async def raise_keyboard_interrupt(): self.loop._process_events = mock.Mock() - try: + with self.assertRaises(KeyboardInterrupt): self.loop.run_until_complete(raise_keyboard_interrupt()) - except KeyboardInterrupt: - pass def func(): self.loop.stop() func.called = True func.called = False - try: - self.loop.call_soon(func) - self.loop.run_forever() - except KeyboardInterrupt: - pass + self.loop.call_soon(self.loop.call_soon, func) + self.loop.run_forever() self.assertTrue(func.called) def test_single_selecter_event_callback_after_stopping(self): @@ -2052,11 +2047,11 @@ def prepare(self): def cleanup(): server.close() - self.run_loop(server.wait_closed()) sock.close() if proto.transport is not None: proto.transport.close() self.run_loop(proto.wait_closed()) + self.run_loop(server.wait_closed()) self.addCleanup(cleanup) diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index cabe75f56d9fb0..153b2de8172273 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -2550,29 +2550,8 @@ def test_event_loop_policy(self): 
def test_get_event_loop(self): policy = asyncio.DefaultEventLoopPolicy() self.assertIsNone(policy._local._loop) - - loop = policy.get_event_loop() - self.assertIsInstance(loop, asyncio.AbstractEventLoop) - - self.assertIs(policy._local._loop, loop) - self.assertIs(loop, policy.get_event_loop()) - loop.close() - - def test_get_event_loop_calls_set_event_loop(self): - policy = asyncio.DefaultEventLoopPolicy() - - with mock.patch.object( - policy, "set_event_loop", - wraps=policy.set_event_loop) as m_set_event_loop: - - loop = policy.get_event_loop() - - # policy._local._loop must be set through .set_event_loop() - # (the unix DefaultEventLoopPolicy needs this call to attach - # the child watcher correctly) - m_set_event_loop.assert_called_with(loop) - - loop.close() + with self.assertRaisesRegex(RuntimeError, 'no current event loop'): + policy.get_event_loop() def test_get_event_loop_after_set_none(self): policy = asyncio.DefaultEventLoopPolicy() @@ -2599,7 +2578,8 @@ def test_new_event_loop(self): def test_set_event_loop(self): policy = asyncio.DefaultEventLoopPolicy() - old_loop = policy.get_event_loop() + old_loop = policy.new_event_loop() + policy.set_event_loop(old_loop) self.assertRaises(TypeError, policy.set_event_loop, object()) @@ -2716,15 +2696,11 @@ def get_event_loop(self): asyncio.set_event_loop_policy(Policy()) loop = asyncio.new_event_loop() - with self.assertWarns(DeprecationWarning) as cm: - with self.assertRaises(TestError): - asyncio.get_event_loop() - self.assertEqual(cm.filename, __file__) + with self.assertRaises(TestError): + asyncio.get_event_loop() asyncio.set_event_loop(None) - with self.assertWarns(DeprecationWarning) as cm: - with self.assertRaises(TestError): - asyncio.get_event_loop() - self.assertEqual(cm.filename, __file__) + with self.assertRaises(TestError): + asyncio.get_event_loop() with self.assertRaisesRegex(RuntimeError, 'no running'): asyncio.get_running_loop() @@ -2738,16 +2714,11 @@ async def func(): loop.run_until_complete(func()) asyncio.set_event_loop(loop) - with self.assertWarns(DeprecationWarning) as cm: - with self.assertRaises(TestError): - asyncio.get_event_loop() - self.assertEqual(cm.filename, __file__) - + with self.assertRaises(TestError): + asyncio.get_event_loop() asyncio.set_event_loop(None) - with self.assertWarns(DeprecationWarning) as cm: - with self.assertRaises(TestError): - asyncio.get_event_loop() - self.assertEqual(cm.filename, __file__) + with self.assertRaises(TestError): + asyncio.get_event_loop() finally: asyncio.set_event_loop_policy(old_policy) @@ -2766,15 +2737,11 @@ def test_get_event_loop_returns_running_loop2(self): loop = asyncio.new_event_loop() self.addCleanup(loop.close) - with self.assertWarns(DeprecationWarning) as cm: - loop2 = asyncio.get_event_loop() - self.addCleanup(loop2.close) - self.assertEqual(cm.filename, __file__) + with self.assertRaisesRegex(RuntimeError, 'no current'): + asyncio.get_event_loop() asyncio.set_event_loop(None) - with self.assertWarns(DeprecationWarning) as cm: - with self.assertRaisesRegex(RuntimeError, 'no current'): - asyncio.get_event_loop() - self.assertEqual(cm.filename, __file__) + with self.assertRaisesRegex(RuntimeError, 'no current'): + asyncio.get_event_loop() with self.assertRaisesRegex(RuntimeError, 'no running'): asyncio.get_running_loop() @@ -2788,15 +2755,11 @@ async def func(): loop.run_until_complete(func()) asyncio.set_event_loop(loop) - with self.assertWarns(DeprecationWarning) as cm: - self.assertIs(asyncio.get_event_loop(), loop) - self.assertEqual(cm.filename, 
__file__) + self.assertIs(asyncio.get_event_loop(), loop) asyncio.set_event_loop(None) - with self.assertWarns(DeprecationWarning) as cm: - with self.assertRaisesRegex(RuntimeError, 'no current'): - asyncio.get_event_loop() - self.assertEqual(cm.filename, __file__) + with self.assertRaisesRegex(RuntimeError, 'no current'): + asyncio.get_event_loop() finally: asyncio.set_event_loop_policy(old_policy) diff --git a/Lib/test/test_asyncio/test_futures.py b/Lib/test/test_asyncio/test_futures.py index 83ea01c2452521..56b0b864de2ddf 100644 --- a/Lib/test/test_asyncio/test_futures.py +++ b/Lib/test/test_asyncio/test_futures.py @@ -146,10 +146,8 @@ def test_initial_state(self): self.assertTrue(f.cancelled()) def test_constructor_without_loop(self): - with self.assertWarns(DeprecationWarning) as cm: - with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'): - self._new_future() - self.assertEqual(cm.filename, __file__) + with self.assertRaisesRegex(RuntimeError, 'no current event loop'): + self._new_future() def test_constructor_use_running_loop(self): async def test(): @@ -159,12 +157,10 @@ async def test(): self.assertIs(f.get_loop(), self.loop) def test_constructor_use_global_loop(self): - # Deprecated in 3.10 + # Deprecated in 3.10, undeprecated in 3.12 asyncio.set_event_loop(self.loop) self.addCleanup(asyncio.set_event_loop, None) - with self.assertWarns(DeprecationWarning) as cm: - f = self._new_future() - self.assertEqual(cm.filename, __file__) + f = self._new_future() self.assertIs(f._loop, self.loop) self.assertIs(f.get_loop(), self.loop) @@ -500,10 +496,8 @@ def run(arg): return (arg, threading.get_ident()) ex = concurrent.futures.ThreadPoolExecutor(1) f1 = ex.submit(run, 'oi') - with self.assertWarns(DeprecationWarning) as cm: - with self.assertRaises(RuntimeError): - asyncio.wrap_future(f1) - self.assertEqual(cm.filename, __file__) + with self.assertRaisesRegex(RuntimeError, 'no current event loop'): + asyncio.wrap_future(f1) ex.shutdown(wait=True) def test_wrap_future_use_running_loop(self): @@ -518,16 +512,14 @@ async def test(): ex.shutdown(wait=True) def test_wrap_future_use_global_loop(self): - # Deprecated in 3.10 + # Deprecated in 3.10, undeprecated in 3.12 asyncio.set_event_loop(self.loop) self.addCleanup(asyncio.set_event_loop, None) def run(arg): return (arg, threading.get_ident()) ex = concurrent.futures.ThreadPoolExecutor(1) f1 = ex.submit(run, 'oi') - with self.assertWarns(DeprecationWarning) as cm: - f2 = asyncio.wrap_future(f1) - self.assertEqual(cm.filename, __file__) + f2 = asyncio.wrap_future(f1) self.assertIs(self.loop, f2._loop) ex.shutdown(wait=True) diff --git a/Lib/test/test_asyncio/test_proactor_events.py b/Lib/test/test_asyncio/test_proactor_events.py index ae30185cef776a..6cb7dc300c5331 100644 --- a/Lib/test/test_asyncio/test_proactor_events.py +++ b/Lib/test/test_asyncio/test_proactor_events.py @@ -75,7 +75,10 @@ def test_loop_reading_data(self): called_buf = bytearray(self.buffer_size) called_buf[:len(buf)] = buf self.loop._proactor.recv_into.assert_called_with(self.sock, called_buf) - self.protocol.data_received.assert_called_with(bytearray(buf)) + self.protocol.data_received.assert_called_with(buf) + # assert_called_with maps bytearray and bytes to the same thing so check manually + # regression test for https://github.com/python/cpython/issues/99941 + self.assertIsInstance(self.protocol.data_received.call_args.args[0], bytes) @unittest.skipIf(sys.flags.optimize, "Assertions are disabled in optimized mode") def 
test_loop_reading_no_data(self): diff --git a/Lib/test/test_asyncio/test_runners.py b/Lib/test/test_asyncio/test_runners.py index 1308b7e2ba4f82..811cf8b72488b8 100644 --- a/Lib/test/test_asyncio/test_runners.py +++ b/Lib/test/test_asyncio/test_runners.py @@ -257,6 +257,16 @@ def new_event_loop(): with self.assertRaises(asyncio.CancelledError): asyncio.run(main()) + def test_asyncio_run_loop_factory(self): + factory = mock.Mock() + loop = factory.return_value = self.new_loop() + + async def main(): + self.assertEqual(asyncio.get_running_loop(), loop) + + asyncio.run(main(), loop_factory=factory) + factory.assert_called_once_with() + class RunnerTests(BaseTest): diff --git a/Lib/test/test_asyncio/test_server.py b/Lib/test/test_asyncio/test_server.py index 860d62d52ef1ea..06d8b60f219f1a 100644 --- a/Lib/test/test_asyncio/test_server.py +++ b/Lib/test/test_asyncio/test_server.py @@ -120,6 +120,33 @@ async def main(srv): self.loop.run_until_complete(srv.serve_forever()) +class TestServer2(unittest.IsolatedAsyncioTestCase): + + async def test_wait_closed(self): + async def serve(*args): + pass + + srv = await asyncio.start_server(serve, socket_helper.HOSTv4, 0) + + # active count = 0 + task1 = asyncio.create_task(srv.wait_closed()) + await asyncio.sleep(0) + self.assertTrue(task1.done()) + + # active count != 0 + srv._attach() + task2 = asyncio.create_task(srv.wait_closed()) + await asyncio.sleep(0) + self.assertFalse(task2.done()) + + srv.close() + await asyncio.sleep(0) + self.assertFalse(task2.done()) + + srv._detach() + await task2 + + @unittest.skipUnless(hasattr(asyncio, 'ProactorEventLoop'), 'Windows only') class ProactorStartServerTests(BaseStartServer, unittest.TestCase): diff --git a/Lib/test/test_asyncio/test_ssl.py b/Lib/test/test_asyncio/test_ssl.py index 5de9b7a14e87da..aaf3c37101f52a 100644 --- a/Lib/test/test_asyncio/test_ssl.py +++ b/Lib/test/test_asyncio/test_ssl.py @@ -1689,7 +1689,7 @@ def stop(self): def run(self): try: with self._sock: - self._sock.setblocking(0) + self._sock.setblocking(False) self._run() finally: self._s1.close() diff --git a/Lib/test/test_asyncio/test_streams.py b/Lib/test/test_asyncio/test_streams.py index 01d5407a497a04..7f9dc621808358 100644 --- a/Lib/test/test_asyncio/test_streams.py +++ b/Lib/test/test_asyncio/test_streams.py @@ -816,10 +816,8 @@ def test_read_all_from_pipe_reader(self): self.assertEqual(data, b'data') def test_streamreader_constructor_without_loop(self): - with self.assertWarns(DeprecationWarning) as cm: - with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'): - asyncio.StreamReader() - self.assertEqual(cm.filename, __file__) + with self.assertRaisesRegex(RuntimeError, 'no current event loop'): + asyncio.StreamReader() def test_streamreader_constructor_use_running_loop(self): # asyncio issue #184: Ensure that StreamReaderProtocol constructor @@ -833,21 +831,17 @@ async def test(): def test_streamreader_constructor_use_global_loop(self): # asyncio issue #184: Ensure that StreamReaderProtocol constructor # retrieves the current loop if the loop parameter is not set - # Deprecated in 3.10 + # Deprecated in 3.10, undeprecated in 3.12 self.addCleanup(asyncio.set_event_loop, None) asyncio.set_event_loop(self.loop) - with self.assertWarns(DeprecationWarning) as cm: - reader = asyncio.StreamReader() - self.assertEqual(cm.filename, __file__) + reader = asyncio.StreamReader() self.assertIs(reader._loop, self.loop) def test_streamreaderprotocol_constructor_without_loop(self): reader = mock.Mock() - with 
self.assertWarns(DeprecationWarning) as cm: - with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'): - asyncio.StreamReaderProtocol(reader) - self.assertEqual(cm.filename, __file__) + with self.assertRaisesRegex(RuntimeError, 'no current event loop'): + asyncio.StreamReaderProtocol(reader) def test_streamreaderprotocol_constructor_use_running_loop(self): # asyncio issue #184: Ensure that StreamReaderProtocol constructor @@ -861,13 +855,11 @@ async def test(): def test_streamreaderprotocol_constructor_use_global_loop(self): # asyncio issue #184: Ensure that StreamReaderProtocol constructor # retrieves the current loop if the loop parameter is not set - # Deprecated in 3.10 + # Deprecated in 3.10, undeprecated in 3.12 self.addCleanup(asyncio.set_event_loop, None) asyncio.set_event_loop(self.loop) reader = mock.Mock() - with self.assertWarns(DeprecationWarning) as cm: - protocol = asyncio.StreamReaderProtocol(reader) - self.assertEqual(cm.filename, __file__) + protocol = asyncio.StreamReaderProtocol(reader) self.assertIs(protocol._loop, self.loop) def test_multiple_drain(self): diff --git a/Lib/test/test_asyncio/test_subprocess.py b/Lib/test/test_asyncio/test_subprocess.py index 20bca3e6b72736..7411f735da3be6 100644 --- a/Lib/test/test_asyncio/test_subprocess.py +++ b/Lib/test/test_asyncio/test_subprocess.py @@ -185,7 +185,7 @@ def test_kill(self): def test_kill_issue43884(self): if sys.platform == 'win32': - blocking_shell_command = f'{sys.executable} -c "import time; time.sleep(100000000)"' + blocking_shell_command = f'{sys.executable} -c "import time; time.sleep(2)"' else: blocking_shell_command = 'sleep 1; sleep 1' creationflags = 0 diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py index 0e38e6a92e52b7..5168b8250ef0a2 100644 --- a/Lib/test/test_asyncio/test_tasks.py +++ b/Lib/test/test_asyncio/test_tasks.py @@ -196,10 +196,8 @@ async def notmuch(): a = notmuch() self.addCleanup(a.close) - with self.assertWarns(DeprecationWarning) as cm: - with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'): - asyncio.ensure_future(a) - self.assertEqual(cm.filename, __file__) + with self.assertRaisesRegex(RuntimeError, 'no current event loop'): + asyncio.ensure_future(a) async def test(): return asyncio.ensure_future(notmuch()) @@ -209,12 +207,10 @@ async def test(): self.assertTrue(t.done()) self.assertEqual(t.result(), 'ok') - # Deprecated in 3.10 + # Deprecated in 3.10, undeprecated in 3.12 asyncio.set_event_loop(self.loop) self.addCleanup(asyncio.set_event_loop, None) - with self.assertWarns(DeprecationWarning) as cm: - t = asyncio.ensure_future(notmuch()) - self.assertEqual(cm.filename, __file__) + t = asyncio.ensure_future(notmuch()) self.assertIs(t._loop, self.loop) self.loop.run_until_complete(t) self.assertTrue(t.done()) @@ -635,27 +631,30 @@ def on_timeout(): await asyncio.sleep(0) return timed_out, structured_block_finished, outer_code_reached - # Test which timed out. - t1 = self.new_task(loop, make_request_with_timeout(sleep=10.0, timeout=0.1)) - timed_out, structured_block_finished, outer_code_reached = ( - loop.run_until_complete(t1) - ) - self.assertTrue(timed_out) - self.assertFalse(structured_block_finished) # it was cancelled - self.assertTrue(outer_code_reached) # task got uncancelled after leaving - # the structured block and continued until - # completion - self.assertEqual(t1.cancelling(), 0) # no pending cancellation of the outer task - - # Test which did not time out. 
- t2 = self.new_task(loop, make_request_with_timeout(sleep=0, timeout=10.0)) - timed_out, structured_block_finished, outer_code_reached = ( - loop.run_until_complete(t2) - ) - self.assertFalse(timed_out) - self.assertTrue(structured_block_finished) - self.assertTrue(outer_code_reached) - self.assertEqual(t2.cancelling(), 0) + try: + # Test which timed out. + t1 = self.new_task(loop, make_request_with_timeout(sleep=10.0, timeout=0.1)) + timed_out, structured_block_finished, outer_code_reached = ( + loop.run_until_complete(t1) + ) + self.assertTrue(timed_out) + self.assertFalse(structured_block_finished) # it was cancelled + self.assertTrue(outer_code_reached) # task got uncancelled after leaving + # the structured block and continued until + # completion + self.assertEqual(t1.cancelling(), 0) # no pending cancellation of the outer task + + # Test which did not time out. + t2 = self.new_task(loop, make_request_with_timeout(sleep=0, timeout=10.0)) + timed_out, structured_block_finished, outer_code_reached = ( + loop.run_until_complete(t2) + ) + self.assertFalse(timed_out) + self.assertTrue(structured_block_finished) + self.assertTrue(outer_code_reached) + self.assertEqual(t2.cancelling(), 0) + finally: + loop.close() def test_cancel(self): @@ -1529,10 +1528,8 @@ async def coro(): self.addCleanup(a.close) futs = asyncio.as_completed([a]) - with self.assertWarns(DeprecationWarning) as cm: - with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'): - list(futs) - self.assertEqual(cm.filename, __file__) + with self.assertRaisesRegex(RuntimeError, 'no current event loop'): + list(futs) def test_as_completed_coroutine_use_running_loop(self): loop = self.new_test_loop() @@ -1962,10 +1959,8 @@ async def coro(): inner = coro() self.addCleanup(inner.close) - with self.assertWarns(DeprecationWarning) as cm: - with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'): - asyncio.shield(inner) - self.assertEqual(cm.filename, __file__) + with self.assertRaisesRegex(RuntimeError, 'no current event loop'): + asyncio.shield(inner) def test_shield_coroutine_use_running_loop(self): async def coro(): @@ -1979,15 +1974,13 @@ async def test(): self.assertEqual(res, 42) def test_shield_coroutine_use_global_loop(self): - # Deprecated in 3.10 + # Deprecated in 3.10, undeprecated in 3.12 async def coro(): return 42 asyncio.set_event_loop(self.loop) self.addCleanup(asyncio.set_event_loop, None) - with self.assertWarns(DeprecationWarning) as cm: - outer = asyncio.shield(coro()) - self.assertEqual(cm.filename, __file__) + outer = asyncio.shield(coro()) self.assertEqual(outer._loop, self.loop) res = self.loop.run_until_complete(outer) self.assertEqual(res, 42) @@ -2099,8 +2092,8 @@ def test_cancel_gather_1(self): async def create(): # The indirection fut->child_coro is needed since otherwise the # gathering task is done at the same time as the child future - def child_coro(): - return (yield from fut) + async def child_coro(): + return await fut gather_future = asyncio.gather(child_coro()) return asyncio.ensure_future(gather_future) gather_task = loop.run_until_complete(create()) @@ -2824,7 +2817,7 @@ def test_current_task_no_running_loop(self): self.assertIsNone(asyncio.current_task(loop=self.loop)) def test_current_task_no_running_loop_implicit(self): - with self.assertRaises(RuntimeError): + with self.assertRaisesRegex(RuntimeError, 'no running event loop'): asyncio.current_task() def test_current_task_with_implicit_loop(self): @@ -2988,10 +2981,8 @@ def _gather(self, *args, 
**kwargs): return asyncio.gather(*args, **kwargs) def test_constructor_empty_sequence_without_loop(self): - with self.assertWarns(DeprecationWarning) as cm: - with self.assertRaises(RuntimeError): - asyncio.gather() - self.assertEqual(cm.filename, __file__) + with self.assertRaisesRegex(RuntimeError, 'no current event loop'): + asyncio.gather() def test_constructor_empty_sequence_use_running_loop(self): async def gather(): @@ -3004,12 +2995,10 @@ async def gather(): self.assertEqual(fut.result(), []) def test_constructor_empty_sequence_use_global_loop(self): - # Deprecated in 3.10 + # Deprecated in 3.10, undeprecated in 3.12 asyncio.set_event_loop(self.one_loop) self.addCleanup(asyncio.set_event_loop, None) - with self.assertWarns(DeprecationWarning) as cm: - fut = asyncio.gather() - self.assertEqual(cm.filename, __file__) + fut = asyncio.gather() self.assertIsInstance(fut, asyncio.Future) self.assertIs(fut._loop, self.one_loop) self._run_loop(self.one_loop) @@ -3097,10 +3086,8 @@ async def coro(): self.addCleanup(gen1.close) gen2 = coro() self.addCleanup(gen2.close) - with self.assertWarns(DeprecationWarning) as cm: - with self.assertRaises(RuntimeError): - asyncio.gather(gen1, gen2) - self.assertEqual(cm.filename, __file__) + with self.assertRaisesRegex(RuntimeError, 'no current event loop'): + asyncio.gather(gen1, gen2) def test_constructor_use_running_loop(self): async def coro(): @@ -3114,16 +3101,14 @@ async def gather(): self.one_loop.run_until_complete(fut) def test_constructor_use_global_loop(self): - # Deprecated in 3.10 + # Deprecated in 3.10, undeprecated in 3.12 async def coro(): return 'abc' asyncio.set_event_loop(self.other_loop) self.addCleanup(asyncio.set_event_loop, None) gen1 = coro() gen2 = coro() - with self.assertWarns(DeprecationWarning) as cm: - fut = asyncio.gather(gen1, gen2) - self.assertEqual(cm.filename, __file__) + fut = asyncio.gather(gen1, gen2) self.assertIs(fut._loop, self.other_loop) self.other_loop.run_until_complete(fut) diff --git a/Lib/test/test_asyncio/test_unix_events.py b/Lib/test/test_asyncio/test_unix_events.py index d806ed497aaab4..600a5900da088d 100644 --- a/Lib/test/test_asyncio/test_unix_events.py +++ b/Lib/test/test_asyncio/test_unix_events.py @@ -3,6 +3,7 @@ import contextlib import errno import io +import multiprocessing import os import pathlib import signal @@ -15,6 +16,8 @@ import warnings from test.support import os_helper from test.support import socket_helper +from test.support import wait_process +from test.support import hashlib_helper if sys.platform == 'win32': raise unittest.SkipTest('UNIX only') @@ -1108,6 +1111,11 @@ def test_write_eof_pending(self): class AbstractChildWatcherTests(unittest.TestCase): + def test_warns_on_subclassing(self): + with self.assertWarns(DeprecationWarning): + class MyWatcher(asyncio.AbstractChildWatcher): + pass + def test_not_implemented(self): f = mock.Mock() watcher = asyncio.AbstractChildWatcher() @@ -1747,7 +1755,9 @@ def f(): self.assertIsInstance(policy.get_event_loop(), asyncio.AbstractEventLoop) - watcher = policy.get_child_watcher() + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + watcher = policy.get_child_watcher() self.assertIsInstance(watcher, asyncio.SafeChildWatcher) self.assertIsNone(watcher._loop) @@ -1765,7 +1775,8 @@ def f(): def test_child_watcher_replace_mainloop_existing(self): policy = self.create_policy() - loop = policy.get_event_loop() + loop = policy.new_event_loop() + policy.set_event_loop(loop) # Explicitly setup SafeChildWatcher, 
# default ThreadedChildWatcher has no _loop property @@ -1860,5 +1871,101 @@ async def runner(): wsock.close() +@unittest.skipUnless(hasattr(os, 'fork'), 'requires os.fork()') +class TestFork(unittest.IsolatedAsyncioTestCase): + + async def test_fork_not_share_event_loop(self): + # The forked process should not share the event loop with the parent + loop = asyncio.get_running_loop() + r, w = os.pipe() + self.addCleanup(os.close, r) + self.addCleanup(os.close, w) + pid = os.fork() + if pid == 0: + # child + try: + loop = asyncio.get_event_loop_policy().get_event_loop() + except RuntimeError: + os.write(w, b'NO LOOP') + except: + os.write(w, b'ERROR:' + ascii(sys.exc_info()).encode()) + finally: + os._exit(0) + else: + # parent + self.assertEqual(os.read(r, 100), b'NO LOOP') + wait_process(pid, exitcode=0) + + @hashlib_helper.requires_hashdigest('md5') + def test_fork_signal_handling(self): + # Sending signal to the forked process should not affect the parent + # process + ctx = multiprocessing.get_context('fork') + manager = ctx.Manager() + self.addCleanup(manager.shutdown) + child_started = manager.Event() + child_handled = manager.Event() + parent_handled = manager.Event() + + def child_main(): + signal.signal(signal.SIGTERM, lambda *args: child_handled.set()) + child_started.set() + + async def main(): + loop = asyncio.get_running_loop() + loop.add_signal_handler(signal.SIGTERM, lambda *args: parent_handled.set()) + + process = ctx.Process(target=child_main) + process.start() + child_started.wait() + os.kill(process.pid, signal.SIGTERM) + process.join() + + async def func(): + await asyncio.sleep(0.1) + return 42 + + # Test parent's loop is still functional + self.assertEqual(await asyncio.create_task(func()), 42) + + asyncio.run(main()) + + self.assertFalse(parent_handled.is_set()) + self.assertTrue(child_handled.is_set()) + + @hashlib_helper.requires_hashdigest('md5') + def test_fork_asyncio_run(self): + ctx = multiprocessing.get_context('fork') + manager = ctx.Manager() + self.addCleanup(manager.shutdown) + result = manager.Value('i', 0) + + async def child_main(): + await asyncio.sleep(0.1) + result.value = 42 + + process = ctx.Process(target=lambda: asyncio.run(child_main())) + process.start() + process.join() + + self.assertEqual(result.value, 42) + + @hashlib_helper.requires_hashdigest('md5') + def test_fork_asyncio_subprocess(self): + ctx = multiprocessing.get_context('fork') + manager = ctx.Manager() + self.addCleanup(manager.shutdown) + result = manager.Value('i', 1) + + async def child_main(): + proc = await asyncio.create_subprocess_exec(sys.executable, '-c', 'pass') + result.value = await proc.wait() + + process = ctx.Process(target=lambda: asyncio.run(child_main())) + process.start() + process.join() + + self.assertEqual(result.value, 0) + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_asyncore.py b/Lib/test/test_asyncore.py deleted file mode 100644 index 5a037fb9c5d595..00000000000000 --- a/Lib/test/test_asyncore.py +++ /dev/null @@ -1,840 +0,0 @@ -import unittest -import select -import os -import socket -import sys -import time -import errno -import struct -import threading - -from test import support -from test.support import os_helper -from test.support import socket_helper -from test.support import threading_helper -from test.support import warnings_helper -from io import BytesIO - -if support.PGO: - raise unittest.SkipTest("test is not helpful for PGO") - -support.requires_working_socket(module=True) - -asyncore = 
warnings_helper.import_deprecated('asyncore') - - -HAS_UNIX_SOCKETS = hasattr(socket, 'AF_UNIX') - -class dummysocket: - def __init__(self): - self.closed = False - - def close(self): - self.closed = True - - def fileno(self): - return 42 - -class dummychannel: - def __init__(self): - self.socket = dummysocket() - - def close(self): - self.socket.close() - -class exitingdummy: - def __init__(self): - pass - - def handle_read_event(self): - raise asyncore.ExitNow() - - handle_write_event = handle_read_event - handle_close = handle_read_event - handle_expt_event = handle_read_event - -class crashingdummy: - def __init__(self): - self.error_handled = False - - def handle_read_event(self): - raise Exception() - - handle_write_event = handle_read_event - handle_close = handle_read_event - handle_expt_event = handle_read_event - - def handle_error(self): - self.error_handled = True - -# used when testing senders; just collects what it gets until newline is sent -def capture_server(evt, buf, serv): - try: - serv.listen() - conn, addr = serv.accept() - except TimeoutError: - pass - else: - n = 200 - for _ in support.busy_retry(support.SHORT_TIMEOUT): - r, w, e = select.select([conn], [], [], 0.1) - if r: - n -= 1 - data = conn.recv(10) - # keep everything except for the newline terminator - buf.write(data.replace(b'\n', b'')) - if b'\n' in data: - break - if n <= 0: - break - - conn.close() - finally: - serv.close() - evt.set() - -def bind_af_aware(sock, addr): - """Helper function to bind a socket according to its family.""" - if HAS_UNIX_SOCKETS and sock.family == socket.AF_UNIX: - # Make sure the path doesn't exist. - os_helper.unlink(addr) - socket_helper.bind_unix_socket(sock, addr) - else: - sock.bind(addr) - - -class HelperFunctionTests(unittest.TestCase): - def test_readwriteexc(self): - # Check exception handling behavior of read, write and _exception - - # check that ExitNow exceptions in the object handler method - # bubbles all the way up through asyncore read/write/_exception calls - tr1 = exitingdummy() - self.assertRaises(asyncore.ExitNow, asyncore.read, tr1) - self.assertRaises(asyncore.ExitNow, asyncore.write, tr1) - self.assertRaises(asyncore.ExitNow, asyncore._exception, tr1) - - # check that an exception other than ExitNow in the object handler - # method causes the handle_error method to get called - tr2 = crashingdummy() - asyncore.read(tr2) - self.assertEqual(tr2.error_handled, True) - - tr2 = crashingdummy() - asyncore.write(tr2) - self.assertEqual(tr2.error_handled, True) - - tr2 = crashingdummy() - asyncore._exception(tr2) - self.assertEqual(tr2.error_handled, True) - - # asyncore.readwrite uses constants in the select module that - # are not present in Windows systems (see this thread: - # http://mail.python.org/pipermail/python-list/2001-October/109973.html) - # These constants should be present as long as poll is available - - @unittest.skipUnless(hasattr(select, 'poll'), 'select.poll required') - def test_readwrite(self): - # Check that correct methods are called by readwrite() - - attributes = ('read', 'expt', 'write', 'closed', 'error_handled') - - expected = ( - (select.POLLIN, 'read'), - (select.POLLPRI, 'expt'), - (select.POLLOUT, 'write'), - (select.POLLERR, 'closed'), - (select.POLLHUP, 'closed'), - (select.POLLNVAL, 'closed'), - ) - - class testobj: - def __init__(self): - self.read = False - self.write = False - self.closed = False - self.expt = False - self.error_handled = False - - def handle_read_event(self): - self.read = True - - def 
handle_write_event(self): - self.write = True - - def handle_close(self): - self.closed = True - - def handle_expt_event(self): - self.expt = True - - def handle_error(self): - self.error_handled = True - - for flag, expectedattr in expected: - tobj = testobj() - self.assertEqual(getattr(tobj, expectedattr), False) - asyncore.readwrite(tobj, flag) - - # Only the attribute modified by the routine we expect to be - # called should be True. - for attr in attributes: - self.assertEqual(getattr(tobj, attr), attr==expectedattr) - - # check that ExitNow exceptions in the object handler method - # bubbles all the way up through asyncore readwrite call - tr1 = exitingdummy() - self.assertRaises(asyncore.ExitNow, asyncore.readwrite, tr1, flag) - - # check that an exception other than ExitNow in the object handler - # method causes the handle_error method to get called - tr2 = crashingdummy() - self.assertEqual(tr2.error_handled, False) - asyncore.readwrite(tr2, flag) - self.assertEqual(tr2.error_handled, True) - - def test_closeall(self): - self.closeall_check(False) - - def test_closeall_default(self): - self.closeall_check(True) - - def closeall_check(self, usedefault): - # Check that close_all() closes everything in a given map - - l = [] - testmap = {} - for i in range(10): - c = dummychannel() - l.append(c) - self.assertEqual(c.socket.closed, False) - testmap[i] = c - - if usedefault: - socketmap = asyncore.socket_map - try: - asyncore.socket_map = testmap - asyncore.close_all() - finally: - testmap, asyncore.socket_map = asyncore.socket_map, socketmap - else: - asyncore.close_all(testmap) - - self.assertEqual(len(testmap), 0) - - for c in l: - self.assertEqual(c.socket.closed, True) - - def test_compact_traceback(self): - try: - raise Exception("I don't like spam!") - except: - real_t, real_v, real_tb = sys.exc_info() - r = asyncore.compact_traceback() - else: - self.fail("Expected exception") - - (f, function, line), t, v, info = r - self.assertEqual(os.path.split(f)[-1], 'test_asyncore.py') - self.assertEqual(function, 'test_compact_traceback') - self.assertEqual(t, real_t) - self.assertEqual(v, real_v) - self.assertEqual(info, '[%s|%s|%s]' % (f, function, line)) - - -class DispatcherTests(unittest.TestCase): - def setUp(self): - pass - - def tearDown(self): - asyncore.close_all() - - def test_basic(self): - d = asyncore.dispatcher() - self.assertEqual(d.readable(), True) - self.assertEqual(d.writable(), True) - - def test_repr(self): - d = asyncore.dispatcher() - self.assertEqual(repr(d), '<asyncore.dispatcher at %#x>' % id(d)) - - def test_log(self): - d = asyncore.dispatcher() - - # capture output of dispatcher.log() (to stderr) - l1 = "Lovely spam! Wonderful spam!" - l2 = "I don't like spam!" - with support.captured_stderr() as stderr: - d.log(l1) - d.log(l2) - - lines = stderr.getvalue().splitlines() - self.assertEqual(lines, ['log: %s' % l1, 'log: %s' % l2]) - - def test_log_info(self): - d = asyncore.dispatcher() - - # capture output of dispatcher.log_info() (to stdout via print) - l1 = "Have you got anything without spam?" - l2 = "Why can't she have egg bacon spam and sausage?" - l3 = "THAT'S got spam in it!" 
- with support.captured_stdout() as stdout: - d.log_info(l1, 'EGGS') - d.log_info(l2) - d.log_info(l3, 'SPAM') - - lines = stdout.getvalue().splitlines() - expected = ['EGGS: %s' % l1, 'info: %s' % l2, 'SPAM: %s' % l3] - self.assertEqual(lines, expected) - - def test_unhandled(self): - d = asyncore.dispatcher() - d.ignore_log_types = () - - # capture output of dispatcher.log_info() (to stdout via print) - with support.captured_stdout() as stdout: - d.handle_expt() - d.handle_read() - d.handle_write() - d.handle_connect() - - lines = stdout.getvalue().splitlines() - expected = ['warning: unhandled incoming priority event', - 'warning: unhandled read event', - 'warning: unhandled write event', - 'warning: unhandled connect event'] - self.assertEqual(lines, expected) - - def test_strerror(self): - # refers to bug #8573 - err = asyncore._strerror(errno.EPERM) - if hasattr(os, 'strerror'): - self.assertEqual(err, os.strerror(errno.EPERM)) - err = asyncore._strerror(-1) - self.assertTrue(err != "") - - -class dispatcherwithsend_noread(asyncore.dispatcher_with_send): - def readable(self): - return False - - def handle_connect(self): - pass - - -class DispatcherWithSendTests(unittest.TestCase): - def setUp(self): - pass - - def tearDown(self): - asyncore.close_all() - - @threading_helper.reap_threads - def test_send(self): - evt = threading.Event() - sock = socket.socket() - sock.settimeout(3) - port = socket_helper.bind_port(sock) - - cap = BytesIO() - args = (evt, cap, sock) - t = threading.Thread(target=capture_server, args=args) - t.start() - try: - # wait a little longer for the server to initialize (it sometimes - # refuses connections on slow machines without this wait) - time.sleep(0.2) - - data = b"Suppose there isn't a 16-ton weight?" - d = dispatcherwithsend_noread() - d.create_socket() - d.connect((socket_helper.HOST, port)) - - # give time for socket to connect - time.sleep(0.1) - - d.send(data) - d.send(data) - d.send(b'\n') - - n = 1000 - while d.out_buffer and n > 0: - asyncore.poll() - n -= 1 - - evt.wait() - - self.assertEqual(cap.getvalue(), data*2) - finally: - threading_helper.join_thread(t) - - -@unittest.skipUnless(hasattr(asyncore, 'file_wrapper'), - 'asyncore.file_wrapper required') -class FileWrapperTest(unittest.TestCase): - def setUp(self): - self.d = b"It's not dead, it's sleeping!" - with open(os_helper.TESTFN, 'wb') as file: - file.write(self.d) - - def tearDown(self): - os_helper.unlink(os_helper.TESTFN) - - def test_recv(self): - fd = os.open(os_helper.TESTFN, os.O_RDONLY) - w = asyncore.file_wrapper(fd) - os.close(fd) - - self.assertNotEqual(w.fd, fd) - self.assertNotEqual(w.fileno(), fd) - self.assertEqual(w.recv(13), b"It's not dead") - self.assertEqual(w.read(6), b", it's") - w.close() - self.assertRaises(OSError, w.read, 1) - - def test_send(self): - d1 = b"Come again?" - d2 = b"I want to buy some cheese." 
- fd = os.open(os_helper.TESTFN, os.O_WRONLY | os.O_APPEND) - w = asyncore.file_wrapper(fd) - os.close(fd) - - w.write(d1) - w.send(d2) - w.close() - with open(os_helper.TESTFN, 'rb') as file: - self.assertEqual(file.read(), self.d + d1 + d2) - - @unittest.skipUnless(hasattr(asyncore, 'file_dispatcher'), - 'asyncore.file_dispatcher required') - def test_dispatcher(self): - fd = os.open(os_helper.TESTFN, os.O_RDONLY) - data = [] - class FileDispatcher(asyncore.file_dispatcher): - def handle_read(self): - data.append(self.recv(29)) - s = FileDispatcher(fd) - os.close(fd) - asyncore.loop(timeout=0.01, use_poll=True, count=2) - self.assertEqual(b"".join(data), self.d) - - def test_resource_warning(self): - # Issue #11453 - fd = os.open(os_helper.TESTFN, os.O_RDONLY) - f = asyncore.file_wrapper(fd) - - os.close(fd) - with warnings_helper.check_warnings(('', ResourceWarning)): - f = None - support.gc_collect() - - def test_close_twice(self): - fd = os.open(os_helper.TESTFN, os.O_RDONLY) - f = asyncore.file_wrapper(fd) - os.close(fd) - - os.close(f.fd) # file_wrapper dupped fd - with self.assertRaises(OSError): - f.close() - - self.assertEqual(f.fd, -1) - # calling close twice should not fail - f.close() - - -class BaseTestHandler(asyncore.dispatcher): - - def __init__(self, sock=None): - asyncore.dispatcher.__init__(self, sock) - self.flag = False - - def handle_accept(self): - raise Exception("handle_accept not supposed to be called") - - def handle_accepted(self): - raise Exception("handle_accepted not supposed to be called") - - def handle_connect(self): - raise Exception("handle_connect not supposed to be called") - - def handle_expt(self): - raise Exception("handle_expt not supposed to be called") - - def handle_close(self): - raise Exception("handle_close not supposed to be called") - - def handle_error(self): - raise - - -class BaseServer(asyncore.dispatcher): - """A server which listens on an address and dispatches the - connection to a handler. 
- """ - - def __init__(self, family, addr, handler=BaseTestHandler): - asyncore.dispatcher.__init__(self) - self.create_socket(family) - self.set_reuse_addr() - bind_af_aware(self.socket, addr) - self.listen(5) - self.handler = handler - - @property - def address(self): - return self.socket.getsockname() - - def handle_accepted(self, sock, addr): - self.handler(sock) - - def handle_error(self): - raise - - -class BaseClient(BaseTestHandler): - - def __init__(self, family, address): - BaseTestHandler.__init__(self) - self.create_socket(family) - self.connect(address) - - def handle_connect(self): - pass - - -class BaseTestAPI: - - def tearDown(self): - asyncore.close_all(ignore_all=True) - - def loop_waiting_for_flag(self, instance, timeout=5): - timeout = float(timeout) / 100 - count = 100 - while asyncore.socket_map and count > 0: - asyncore.loop(timeout=0.01, count=1, use_poll=self.use_poll) - if instance.flag: - return - count -= 1 - time.sleep(timeout) - self.fail("flag not set") - - def test_handle_connect(self): - # make sure handle_connect is called on connect() - - class TestClient(BaseClient): - def handle_connect(self): - self.flag = True - - server = BaseServer(self.family, self.addr) - client = TestClient(self.family, server.address) - self.loop_waiting_for_flag(client) - - def test_handle_accept(self): - # make sure handle_accept() is called when a client connects - - class TestListener(BaseTestHandler): - - def __init__(self, family, addr): - BaseTestHandler.__init__(self) - self.create_socket(family) - bind_af_aware(self.socket, addr) - self.listen(5) - self.address = self.socket.getsockname() - - def handle_accept(self): - self.flag = True - - server = TestListener(self.family, self.addr) - client = BaseClient(self.family, server.address) - self.loop_waiting_for_flag(server) - - def test_handle_accepted(self): - # make sure handle_accepted() is called when a client connects - - class TestListener(BaseTestHandler): - - def __init__(self, family, addr): - BaseTestHandler.__init__(self) - self.create_socket(family) - bind_af_aware(self.socket, addr) - self.listen(5) - self.address = self.socket.getsockname() - - def handle_accept(self): - asyncore.dispatcher.handle_accept(self) - - def handle_accepted(self, sock, addr): - sock.close() - self.flag = True - - server = TestListener(self.family, self.addr) - client = BaseClient(self.family, server.address) - self.loop_waiting_for_flag(server) - - - def test_handle_read(self): - # make sure handle_read is called on data received - - class TestClient(BaseClient): - def handle_read(self): - self.flag = True - - class TestHandler(BaseTestHandler): - def __init__(self, conn): - BaseTestHandler.__init__(self, conn) - self.send(b'x' * 1024) - - server = BaseServer(self.family, self.addr, TestHandler) - client = TestClient(self.family, server.address) - self.loop_waiting_for_flag(client) - - def test_handle_write(self): - # make sure handle_write is called - - class TestClient(BaseClient): - def handle_write(self): - self.flag = True - - server = BaseServer(self.family, self.addr) - client = TestClient(self.family, server.address) - self.loop_waiting_for_flag(client) - - def test_handle_close(self): - # make sure handle_close is called when the other end closes - # the connection - - class TestClient(BaseClient): - - def handle_read(self): - # in order to make handle_close be called we are supposed - # to make at least one recv() call - self.recv(1024) - - def handle_close(self): - self.flag = True - self.close() - - class 
TestHandler(BaseTestHandler): - def __init__(self, conn): - BaseTestHandler.__init__(self, conn) - self.close() - - server = BaseServer(self.family, self.addr, TestHandler) - client = TestClient(self.family, server.address) - self.loop_waiting_for_flag(client) - - def test_handle_close_after_conn_broken(self): - # Check that ECONNRESET/EPIPE is correctly handled (issues #5661 and - # #11265). - - data = b'\0' * 128 - - class TestClient(BaseClient): - - def handle_write(self): - self.send(data) - - def handle_close(self): - self.flag = True - self.close() - - def handle_expt(self): - self.flag = True - self.close() - - class TestHandler(BaseTestHandler): - - def handle_read(self): - self.recv(len(data)) - self.close() - - def writable(self): - return False - - server = BaseServer(self.family, self.addr, TestHandler) - client = TestClient(self.family, server.address) - self.loop_waiting_for_flag(client) - - @unittest.skipIf(sys.platform.startswith("sunos"), - "OOB support is broken on Solaris") - def test_handle_expt(self): - # Make sure handle_expt is called on OOB data received. - # Note: this might fail on some platforms as OOB data is - # tenuously supported and rarely used. - if HAS_UNIX_SOCKETS and self.family == socket.AF_UNIX: - self.skipTest("Not applicable to AF_UNIX sockets.") - - if sys.platform == "darwin" and self.use_poll: - self.skipTest("poll may fail on macOS; see issue #28087") - - class TestClient(BaseClient): - def handle_expt(self): - self.socket.recv(1024, socket.MSG_OOB) - self.flag = True - - class TestHandler(BaseTestHandler): - def __init__(self, conn): - BaseTestHandler.__init__(self, conn) - self.socket.send(bytes(chr(244), 'latin-1'), socket.MSG_OOB) - - server = BaseServer(self.family, self.addr, TestHandler) - client = TestClient(self.family, server.address) - self.loop_waiting_for_flag(client) - - def test_handle_error(self): - - class TestClient(BaseClient): - def handle_write(self): - 1.0 / 0 - def handle_error(self): - self.flag = True - try: - raise - except ZeroDivisionError: - pass - else: - raise Exception("exception not raised") - - server = BaseServer(self.family, self.addr) - client = TestClient(self.family, server.address) - self.loop_waiting_for_flag(client) - - def test_connection_attributes(self): - server = BaseServer(self.family, self.addr) - client = BaseClient(self.family, server.address) - - # we start disconnected - self.assertFalse(server.connected) - self.assertTrue(server.accepting) - # this can't be taken for granted across all platforms - #self.assertFalse(client.connected) - self.assertFalse(client.accepting) - - # execute some loops so that client connects to server - asyncore.loop(timeout=0.01, use_poll=self.use_poll, count=100) - self.assertFalse(server.connected) - self.assertTrue(server.accepting) - self.assertTrue(client.connected) - self.assertFalse(client.accepting) - - # disconnect the client - client.close() - self.assertFalse(server.connected) - self.assertTrue(server.accepting) - self.assertFalse(client.connected) - self.assertFalse(client.accepting) - - # stop serving - server.close() - self.assertFalse(server.connected) - self.assertFalse(server.accepting) - - def test_create_socket(self): - s = asyncore.dispatcher() - s.create_socket(self.family) - self.assertEqual(s.socket.type, socket.SOCK_STREAM) - self.assertEqual(s.socket.family, self.family) - self.assertEqual(s.socket.gettimeout(), 0) - self.assertFalse(s.socket.get_inheritable()) - - def test_bind(self): - if HAS_UNIX_SOCKETS and self.family == socket.AF_UNIX: 
- self.skipTest("Not applicable to AF_UNIX sockets.") - s1 = asyncore.dispatcher() - s1.create_socket(self.family) - s1.bind(self.addr) - s1.listen(5) - port = s1.socket.getsockname()[1] - - s2 = asyncore.dispatcher() - s2.create_socket(self.family) - # EADDRINUSE indicates the socket was correctly bound - self.assertRaises(OSError, s2.bind, (self.addr[0], port)) - - def test_set_reuse_addr(self): - if HAS_UNIX_SOCKETS and self.family == socket.AF_UNIX: - self.skipTest("Not applicable to AF_UNIX sockets.") - - with socket.socket(self.family) as sock: - try: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - except OSError: - unittest.skip("SO_REUSEADDR not supported on this platform") - else: - # if SO_REUSEADDR succeeded for sock we expect asyncore - # to do the same - s = asyncore.dispatcher(socket.socket(self.family)) - self.assertFalse(s.socket.getsockopt(socket.SOL_SOCKET, - socket.SO_REUSEADDR)) - s.socket.close() - s.create_socket(self.family) - s.set_reuse_addr() - self.assertTrue(s.socket.getsockopt(socket.SOL_SOCKET, - socket.SO_REUSEADDR)) - - @threading_helper.reap_threads - def test_quick_connect(self): - # see: http://bugs.python.org/issue10340 - if self.family not in (socket.AF_INET, getattr(socket, "AF_INET6", object())): - self.skipTest("test specific to AF_INET and AF_INET6") - - server = BaseServer(self.family, self.addr) - # run the thread 500 ms: the socket should be connected in 200 ms - t = threading.Thread(target=lambda: asyncore.loop(timeout=0.1, - count=5)) - t.start() - try: - with socket.socket(self.family, socket.SOCK_STREAM) as s: - s.settimeout(.2) - s.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, - struct.pack('ii', 1, 0)) - - try: - s.connect(server.address) - except OSError: - pass - finally: - threading_helper.join_thread(t) - -class TestAPI_UseIPv4Sockets(BaseTestAPI): - family = socket.AF_INET - addr = (socket_helper.HOST, 0) - -@unittest.skipUnless(socket_helper.IPV6_ENABLED, 'IPv6 support required') -class TestAPI_UseIPv6Sockets(BaseTestAPI): - family = socket.AF_INET6 - addr = (socket_helper.HOSTv6, 0) - -@unittest.skipUnless(HAS_UNIX_SOCKETS, 'Unix sockets required') -class TestAPI_UseUnixSockets(BaseTestAPI): - if HAS_UNIX_SOCKETS: - family = socket.AF_UNIX - addr = os_helper.TESTFN - - def tearDown(self): - os_helper.unlink(self.addr) - BaseTestAPI.tearDown(self) - -class TestAPI_UseIPv4Select(TestAPI_UseIPv4Sockets, unittest.TestCase): - use_poll = False - -@unittest.skipUnless(hasattr(select, 'poll'), 'select.poll required') -class TestAPI_UseIPv4Poll(TestAPI_UseIPv4Sockets, unittest.TestCase): - use_poll = True - -class TestAPI_UseIPv6Select(TestAPI_UseIPv6Sockets, unittest.TestCase): - use_poll = False - -@unittest.skipUnless(hasattr(select, 'poll'), 'select.poll required') -class TestAPI_UseIPv6Poll(TestAPI_UseIPv6Sockets, unittest.TestCase): - use_poll = True - -class TestAPI_UseUnixSocketsSelect(TestAPI_UseUnixSockets, unittest.TestCase): - use_poll = False - -@unittest.skipUnless(hasattr(select, 'poll'), 'select.poll required') -class TestAPI_UseUnixSocketsPoll(TestAPI_UseUnixSockets, unittest.TestCase): - use_poll = True - -if __name__ == "__main__": - unittest.main() diff --git a/Lib/test/test_audit.py b/Lib/test/test_audit.py index 50f78f2a6b8a46..70f8a77a4761a7 100644 --- a/Lib/test/test_audit.py +++ b/Lib/test/test_audit.py @@ -186,6 +186,23 @@ def test_sys_getframe(self): self.assertEqual(actual, expected) + + def test_threading(self): + returncode, events, stderr = self.run_python("test_threading") + if returncode: + 
self.fail(stderr) + + if support.verbose: + print(*events, sep='\n') + actual = [(ev[0], ev[2]) for ev in events] + expected = [ + ("_thread.start_new_thread", "(<test_func>, (), None)"), + ("test.test_func", "()"), + ] + + self.assertEqual(actual, expected) + + def test_wmi_exec_query(self): import_helper.import_module("_wmi") returncode, events, stderr = self.run_python("test_wmi_exec_query") @@ -222,6 +239,11 @@ def test_syslog(self): ('syslog.closelog', '', '')] ) + def test_not_in_gc(self): + returncode, _, stderr = self.run_python("test_not_in_gc") + if returncode: + self.fail(stderr) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py index 31e50638d4a7c1..eb1c389257cc4b 100644 --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -159,7 +159,7 @@ def test_import(self): __import__('string') __import__(name='sys') __import__(name='time', level=0) - self.assertRaises(ImportError, __import__, 'spamspam') + self.assertRaises(ModuleNotFoundError, __import__, 'spamspam') self.assertRaises(TypeError, __import__, 1, 2, 3, 4) self.assertRaises(ValueError, __import__, '') self.assertRaises(TypeError, __import__, 'sys', name='sys') @@ -736,6 +736,7 @@ def test_exec_globals(self): self.assertRaises(TypeError, exec, code, {'__builtins__': 123}) + def test_exec_globals_frozen(self): class frozendict_error(Exception): pass @@ -767,6 +768,36 @@ def __setitem__(self, key, value): self.assertRaises(frozendict_error, exec, code, namespace) + def test_exec_globals_error_on_get(self): + # custom `globals` or `builtins` can raise errors on item access + class setonlyerror(Exception): + pass + + class setonlydict(dict): + def __getitem__(self, key): + raise setonlyerror + + # globals' `__getitem__` raises + code = compile("globalname", "test", "exec") + self.assertRaises(setonlyerror, + exec, code, setonlydict({'globalname': 1})) + + # builtins' `__getitem__` raises + code = compile("superglobal", "test", "exec") + self.assertRaises(setonlyerror, exec, code, + {'__builtins__': setonlydict({'superglobal': 1})}) + + def test_exec_globals_dict_subclass(self): + class customdict(dict): # this one should not do anything fancy + pass + + code = compile("superglobal", "test", "exec") + # works correctly + exec(code, {'__builtins__': customdict({'superglobal': 1})}) + # custom builtins dict subclass is missing key + self.assertRaisesRegex(NameError, "name 'superglobal' is not defined", + exec, code, {'__builtins__': customdict()}) + def test_exec_redirected(self): savestdout = sys.stdout sys.stdout = None # Whatever that cannot flush() @@ -2349,7 +2380,7 @@ def test_type_name(self): self.assertEqual(A.__module__, __name__) with self.assertRaises(ValueError): type('A\x00B', (), {}) - with self.assertRaises(ValueError): + with self.assertRaises(UnicodeEncodeError): type('A\udcdcB', (), {}) with self.assertRaises(TypeError): type(b'A', (), {}) @@ -2366,7 +2397,7 @@ def test_type_name(self): with self.assertRaises(ValueError): A.__name__ = 'A\x00B' self.assertEqual(A.__name__, 'C') - with self.assertRaises(ValueError): + with self.assertRaises(UnicodeEncodeError): A.__name__ = 'A\udcdcB' self.assertEqual(A.__name__, 'C') with self.assertRaises(TypeError): diff --git a/Lib/test/test_call.py b/Lib/test/test_call.py index d4ddb7922ef37d..c17528be97b484 100644 --- a/Lib/test/test_call.py +++ b/Lib/test/test_call.py @@ -559,7 +559,7 @@ def __index__(self): self.kwargs.clear() gc.collect() return 0 - x = IntWithDict(dont_inherit=IntWithDict()) + x = 
IntWithDict(optimize=IntWithDict()) # We test the argument handling of "compile" here, the compilation # itself is not relevant. When we pass flags=x below, x.__index__() is # called, which changes the keywords dict. @@ -580,7 +580,7 @@ def testfunction_kw(self, *, kw): return self -QUICKENING_WARMUP_DELAY = 8 +ADAPTIVE_WARMUP_DELAY = 2 class TestPEP590(unittest.TestCase): @@ -771,7 +771,7 @@ def f(num): return num + 1 assert_equal(11, f(num)) function_setvectorcall(f) # make sure specializer is triggered by running > 50 times - for _ in range(10 * QUICKENING_WARMUP_DELAY): + for _ in range(10 * ADAPTIVE_WARMUP_DELAY): assert_equal("overridden", f(num)) def test_setvectorcall_load_attr_specialization_skip(self): @@ -787,7 +787,7 @@ def __getattribute__(self, attr): function_setvectorcall(X.__getattribute__) # make sure specialization doesn't trigger # when vectorcall is overridden - for _ in range(QUICKENING_WARMUP_DELAY): + for _ in range(ADAPTIVE_WARMUP_DELAY): assert_equal("overridden", x.a) def test_setvectorcall_load_attr_specialization_deopt(self): @@ -803,12 +803,12 @@ def get_a(x): assert_equal = self.assertEqual x = X() # trigger LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN specialization - for _ in range(QUICKENING_WARMUP_DELAY): + for _ in range(ADAPTIVE_WARMUP_DELAY): assert_equal("a", get_a(x)) function_setvectorcall(X.__getattribute__) # make sure specialized LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN # gets deopted due to overridden vectorcall - for _ in range(QUICKENING_WARMUP_DELAY): + for _ in range(ADAPTIVE_WARMUP_DELAY): assert_equal("overridden", get_a(x)) @requires_limited_api diff --git a/Lib/test/test_capi/__init__.py b/Lib/test/test_capi/__init__.py new file mode 100644 index 00000000000000..4b16ecc31156a5 --- /dev/null +++ b/Lib/test/test_capi/__init__.py @@ -0,0 +1,5 @@ +import os +from test.support import load_package_tests + +def load_tests(*args): + return load_package_tests(os.path.dirname(__file__), *args) diff --git a/Lib/test/test_capi/__main__.py b/Lib/test/test_capi/__main__.py new file mode 100644 index 00000000000000..05d01775ddf43c --- /dev/null +++ b/Lib/test/test_capi/__main__.py @@ -0,0 +1,3 @@ +import unittest + +unittest.main('test.test_capi') diff --git a/Lib/test/test_capi/test_codecs.py b/Lib/test/test_capi/test_codecs.py new file mode 100644 index 00000000000000..e46726192aa05b --- /dev/null +++ b/Lib/test/test_capi/test_codecs.py @@ -0,0 +1,54 @@ +import unittest +from test.support import import_helper + +_testcapi = import_helper.import_module('_testcapi') + + +class CAPITest(unittest.TestCase): + + def test_decodeutf8(self): + """Test PyUnicode_DecodeUTF8()""" + decodeutf8 = _testcapi.unicode_decodeutf8 + + for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600']: + b = s.encode('utf-8') + self.assertEqual(decodeutf8(b), s) + self.assertEqual(decodeutf8(b, 'strict'), s) + + self.assertRaises(UnicodeDecodeError, decodeutf8, b'\x80') + self.assertRaises(UnicodeDecodeError, decodeutf8, b'\xc0') + self.assertRaises(UnicodeDecodeError, decodeutf8, b'\xff') + self.assertRaises(UnicodeDecodeError, decodeutf8, b'a\xf0\x9f') + self.assertEqual(decodeutf8(b'a\xf0\x9f', 'replace'), 'a\ufffd') + self.assertEqual(decodeutf8(b'a\xf0\x9fb', 'replace'), 'a\ufffdb') + + self.assertRaises(LookupError, decodeutf8, b'a\x80', 'foo') + # TODO: Test PyUnicode_DecodeUTF8() with NULL as data and + # negative size. 
+ + def test_decodeutf8stateful(self): + """Test PyUnicode_DecodeUTF8Stateful()""" + decodeutf8stateful = _testcapi.unicode_decodeutf8stateful + + for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600']: + b = s.encode('utf-8') + self.assertEqual(decodeutf8stateful(b), (s, len(b))) + self.assertEqual(decodeutf8stateful(b, 'strict'), (s, len(b))) + + self.assertRaises(UnicodeDecodeError, decodeutf8stateful, b'\x80') + self.assertRaises(UnicodeDecodeError, decodeutf8stateful, b'\xc0') + self.assertRaises(UnicodeDecodeError, decodeutf8stateful, b'\xff') + self.assertEqual(decodeutf8stateful(b'a\xf0\x9f'), ('a', 1)) + self.assertEqual(decodeutf8stateful(b'a\xf0\x9f', 'replace'), ('a', 1)) + self.assertRaises(UnicodeDecodeError, decodeutf8stateful, b'a\xf0\x9fb') + self.assertEqual(decodeutf8stateful(b'a\xf0\x9fb', 'replace'), ('a\ufffdb', 4)) + + self.assertRaises(LookupError, decodeutf8stateful, b'a\x80', 'foo') + # TODO: Test PyUnicode_DecodeUTF8Stateful() with NULL as data and + # negative size. + # TODO: Test PyUnicode_DecodeUTF8Stateful() with NULL as the address of + # "consumed". + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_getargs2.py b/Lib/test/test_capi/test_getargs.py similarity index 100% rename from Lib/test/test_getargs2.py rename to Lib/test/test_capi/test_getargs.py diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi/test_misc.py similarity index 82% rename from Lib/test/test_capi.py rename to Lib/test/test_capi/test_misc.py index 49f207ed953c2d..06a51aa3cc219a 100644 --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi/test_misc.py @@ -2,7 +2,6 @@ # these are all functions _testcapi exports whose name begins with 'test_'. from collections import OrderedDict -from contextlib import contextmanager, ExitStack import _thread import importlib.machinery import importlib.util @@ -20,7 +19,6 @@ import weakref from test import support from test.support import MISSING_C_DOCSTRINGS -from test.support import catch_unraisable_exception from test.support import import_helper from test.support import threading_helper from test.support import warnings_helper @@ -140,8 +138,9 @@ def test_seq_bytes_to_charp_array(self): class Z(object): def __len__(self): return 1 - self.assertRaises(TypeError, _posixsubprocess.fork_exec, - 1,Z(),3,(1, 2),5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23) + with self.assertRaisesRegex(TypeError, 'indexing'): + _posixsubprocess.fork_exec( + 1,Z(),True,(1, 2),5,6,7,8,9,10,11,12,13,14,True,True,17,False,19,20,21,22,False) # Issue #15736: overflow in _PySequence_BytesToCharpArray() class Z(object): def __len__(self): @@ -149,7 +148,7 @@ def __len__(self): def __getitem__(self, i): return b'x' self.assertRaises(MemoryError, _posixsubprocess.fork_exec, - 1,Z(),3,(1, 2),5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23) + 1,Z(),True,(1, 2),5,6,7,8,9,10,11,12,13,14,True,True,17,False,19,20,21,22,False) @unittest.skipUnless(_posixsubprocess, '_posixsubprocess required for this test.') def test_subprocess_fork_exec(self): @@ -159,7 +158,7 @@ def __len__(self): # Issue #15738: crash in subprocess_fork_exec() self.assertRaises(TypeError, _posixsubprocess.fork_exec, - Z(),[b'1'],3,(1, 2),5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23) + Z(),[b'1'],True,(1, 2),5,6,7,8,9,10,11,12,13,14,True,True,17,False,19,20,21,22,False) @unittest.skipIf(MISSING_C_DOCSTRINGS, "Signature information for builtins requires docstrings") @@ -416,6 +415,86 @@ class SubDict(dict): self.assertTrue(_testcapi.mapping_has_key(dct2, 'a')) 
self.assertFalse(_testcapi.mapping_has_key(dct2, 'b')) + def test_sequence_set_slice(self): + # Correct case: + data = [1, 2, 3, 4, 5] + data_copy = data.copy() + + _testcapi.sequence_set_slice(data, 1, 3, [8, 9]) + data_copy[1:3] = [8, 9] + self.assertEqual(data, data_copy) + self.assertEqual(data, [1, 8, 9, 4, 5]) + + # Custom class: + class Custom: + def __setitem__(self, index, value): + self.index = index + self.value = value + + c = Custom() + _testcapi.sequence_set_slice(c, 0, 5, 'abc') + self.assertEqual(c.index, slice(0, 5)) + self.assertEqual(c.value, 'abc') + + # Immutable sequences must raise: + bad_seq1 = (1, 2, 3, 4) + with self.assertRaises(TypeError): + _testcapi.sequence_set_slice(bad_seq1, 1, 3, (8, 9)) + self.assertEqual(bad_seq1, (1, 2, 3, 4)) + + bad_seq2 = 'abcd' + with self.assertRaises(TypeError): + _testcapi.sequence_set_slice(bad_seq2, 1, 3, 'xy') + self.assertEqual(bad_seq2, 'abcd') + + # Not a sequence: + with self.assertRaises(TypeError): + _testcapi.sequence_set_slice(None, 1, 3, 'xy') + + mapping = {1: 'a', 2: 'b', 3: 'c'} + with self.assertRaises(TypeError): + _testcapi.sequence_set_slice(mapping, 1, 3, 'xy') + self.assertEqual(mapping, {1: 'a', 2: 'b', 3: 'c'}) + + def test_sequence_del_slice(self): + # Correct case: + data = [1, 2, 3, 4, 5] + data_copy = data.copy() + + _testcapi.sequence_del_slice(data, 1, 3) + del data_copy[1:3] + self.assertEqual(data, data_copy) + self.assertEqual(data, [1, 4, 5]) + + # Custom class: + class Custom: + def __delitem__(self, index): + self.index = index + + c = Custom() + _testcapi.sequence_del_slice(c, 0, 5) + self.assertEqual(c.index, slice(0, 5)) + + # Immutable sequences must raise: + bad_seq1 = (1, 2, 3, 4) + with self.assertRaises(TypeError): + _testcapi.sequence_del_slice(bad_seq1, 1, 3) + self.assertEqual(bad_seq1, (1, 2, 3, 4)) + + bad_seq2 = 'abcd' + with self.assertRaises(TypeError): + _testcapi.sequence_del_slice(bad_seq2, 1, 3) + self.assertEqual(bad_seq2, 'abcd') + + # Not a sequence: + with self.assertRaises(TypeError): + _testcapi.sequence_del_slice(None, 1, 3) + + mapping = {1: 'a', 2: 'b', 3: 'c'} + with self.assertRaises(TypeError): + _testcapi.sequence_del_slice(mapping, 1, 3) + self.assertEqual(mapping, {1: 'a', 2: 'b', 3: 'c'}) + @unittest.skipUnless(hasattr(_testcapi, 'negative_refcount'), 'need _testcapi.negative_refcount') def test_negative_refcount(self): @@ -907,6 +986,21 @@ def method_example(self): ... self.assertEqual(_testcapi.eval_get_func_name(sum), "sum") # c function self.assertEqual(_testcapi.eval_get_func_name(A), "type") + def test_eval_get_func_desc(self): + def function_example(): ... + + class A: + def method_example(self): ... 
+ + self.assertEqual(_testcapi.eval_get_func_desc(function_example), + "()") + self.assertEqual(_testcapi.eval_get_func_desc(A.method_example), + "()") + self.assertEqual(_testcapi.eval_get_func_desc(A().method_example), + "()") + self.assertEqual(_testcapi.eval_get_func_desc(sum), "()") # c function + self.assertEqual(_testcapi.eval_get_func_desc(A), " object") + def test_function_get_code(self): import types @@ -943,7 +1037,14 @@ def some(): _testcapi.function_get_module(None) # not a function def test_function_get_defaults(self): - def some(pos_only='p', zero=0, optional=None): + def some( + pos_only1, pos_only2='p', + /, + zero=0, optional=None, + *, + kw1, + kw2=True, + ): pass defaults = _testcapi.function_get_defaults(some) @@ -951,10 +1052,17 @@ def some(pos_only='p', zero=0, optional=None): self.assertEqual(defaults, some.__defaults__) with self.assertRaises(SystemError): - _testcapi.function_get_module(None) # not a function + _testcapi.function_get_defaults(None) # not a function def test_function_set_defaults(self): - def some(pos_only='p', zero=0, optional=None): + def some( + pos_only1, pos_only2='p', + /, + zero=0, optional=None, + *, + kw1, + kw2=True, + ): pass old_defaults = ('p', 0, None) @@ -966,11 +1074,22 @@ def some(pos_only='p', zero=0, optional=None): self.assertEqual(_testcapi.function_get_defaults(some), old_defaults) self.assertEqual(some.__defaults__, old_defaults) + with self.assertRaises(SystemError): + _testcapi.function_set_defaults(1, ()) # not a function + self.assertEqual(_testcapi.function_get_defaults(some), old_defaults) + self.assertEqual(some.__defaults__, old_defaults) + new_defaults = ('q', 1, None) _testcapi.function_set_defaults(some, new_defaults) self.assertEqual(_testcapi.function_get_defaults(some), new_defaults) self.assertEqual(some.__defaults__, new_defaults) + # Empty tuple is fine: + new_defaults = () + _testcapi.function_set_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_defaults(some), new_defaults) + self.assertEqual(some.__defaults__, new_defaults) + class tuplesub(tuple): ... # tuple subclasses must work new_defaults = tuplesub(((1, 2), ['a', 'b'], None)) @@ -984,6 +1103,73 @@ class tuplesub(tuple): ... 
# tuple subclasses must work self.assertEqual(_testcapi.function_get_defaults(some), None) self.assertEqual(some.__defaults__, None) + def test_function_get_kw_defaults(self): + def some( + pos_only1, pos_only2='p', + /, + zero=0, optional=None, + *, + kw1, + kw2=True, + ): + pass + + defaults = _testcapi.function_get_kw_defaults(some) + self.assertEqual(defaults, {'kw2': True}) + self.assertEqual(defaults, some.__kwdefaults__) + + with self.assertRaises(SystemError): + _testcapi.function_get_kw_defaults(None) # not a function + + def test_function_set_kw_defaults(self): + def some( + pos_only1, pos_only2='p', + /, + zero=0, optional=None, + *, + kw1, + kw2=True, + ): + pass + + old_defaults = {'kw2': True} + self.assertEqual(_testcapi.function_get_kw_defaults(some), old_defaults) + self.assertEqual(some.__kwdefaults__, old_defaults) + + with self.assertRaises(SystemError): + _testcapi.function_set_kw_defaults(some, 1) # not dict or None + self.assertEqual(_testcapi.function_get_kw_defaults(some), old_defaults) + self.assertEqual(some.__kwdefaults__, old_defaults) + + with self.assertRaises(SystemError): + _testcapi.function_set_kw_defaults(1, {}) # not a function + self.assertEqual(_testcapi.function_get_kw_defaults(some), old_defaults) + self.assertEqual(some.__kwdefaults__, old_defaults) + + new_defaults = {'kw2': (1, 2, 3)} + _testcapi.function_set_kw_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_kw_defaults(some), new_defaults) + self.assertEqual(some.__kwdefaults__, new_defaults) + + # Empty dict is fine: + new_defaults = {} + _testcapi.function_set_kw_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_kw_defaults(some), new_defaults) + self.assertEqual(some.__kwdefaults__, new_defaults) + + class dictsub(dict): ... 
# dict subclasses must work + + new_defaults = dictsub({'kw2': None}) + _testcapi.function_set_kw_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_kw_defaults(some), new_defaults) + self.assertEqual(some.__kwdefaults__, new_defaults) + + # `None` is special, it sets `kwdefaults` to `NULL`, + # it needs special handling in `_testcapi`: + _testcapi.function_set_kw_defaults(some, None) + self.assertEqual(_testcapi.function_get_kw_defaults(some), None) + self.assertEqual(some.__kwdefaults__, None) + class TestPendingCalls(unittest.TestCase): @@ -1490,27 +1676,6 @@ class Subclass(BaseException, self.module.StateAccessType): self.assertIs(Subclass().get_defining_module(), self.module) -class Test_FrameAPI(unittest.TestCase): - - def getframe(self): - return sys._getframe() - - def getgenframe(self): - yield sys._getframe() - - def test_frame_getters(self): - frame = self.getframe() - self.assertEqual(frame.f_locals, _testcapi.frame_getlocals(frame)) - self.assertIs(frame.f_globals, _testcapi.frame_getglobals(frame)) - self.assertIs(frame.f_builtins, _testcapi.frame_getbuiltins(frame)) - self.assertEqual(frame.f_lasti, _testcapi.frame_getlasti(frame)) - - def test_frame_get_generator(self): - gen = self.getgenframe() - frame = next(gen) - self.assertIs(gen, _testcapi.frame_getgenerator(frame)) - - SUFFICIENT_TO_DEOPT_AND_SPECIALIZE = 100 class Test_Pep523API(unittest.TestCase): @@ -1539,333 +1704,5 @@ def func2(x=None): self.do_test(func2) -class TestDictWatchers(unittest.TestCase): - # types of watchers testcapimodule can add: - EVENTS = 0 # appends dict events as strings to global event list - ERROR = 1 # unconditionally sets and signals a RuntimeException - SECOND = 2 # always appends "second" to global event list - - def add_watcher(self, kind=EVENTS): - return _testcapi.add_dict_watcher(kind) - - def clear_watcher(self, watcher_id): - _testcapi.clear_dict_watcher(watcher_id) - - @contextmanager - def watcher(self, kind=EVENTS): - wid = self.add_watcher(kind) - try: - yield wid - finally: - self.clear_watcher(wid) - - def assert_events(self, expected): - actual = _testcapi.get_dict_watcher_events() - self.assertEqual(actual, expected) - - def watch(self, wid, d): - _testcapi.watch_dict(wid, d) - - def unwatch(self, wid, d): - _testcapi.unwatch_dict(wid, d) - - def test_set_new_item(self): - d = {} - with self.watcher() as wid: - self.watch(wid, d) - d["foo"] = "bar" - self.assert_events(["new:foo:bar"]) - - def test_set_existing_item(self): - d = {"foo": "bar"} - with self.watcher() as wid: - self.watch(wid, d) - d["foo"] = "baz" - self.assert_events(["mod:foo:baz"]) - - def test_clone(self): - d = {} - d2 = {"foo": "bar"} - with self.watcher() as wid: - self.watch(wid, d) - d.update(d2) - self.assert_events(["clone"]) - - def test_no_event_if_not_watched(self): - d = {} - with self.watcher() as wid: - d["foo"] = "bar" - self.assert_events([]) - - def test_del(self): - d = {"foo": "bar"} - with self.watcher() as wid: - self.watch(wid, d) - del d["foo"] - self.assert_events(["del:foo"]) - - def test_pop(self): - d = {"foo": "bar"} - with self.watcher() as wid: - self.watch(wid, d) - d.pop("foo") - self.assert_events(["del:foo"]) - - def test_clear(self): - d = {"foo": "bar"} - with self.watcher() as wid: - self.watch(wid, d) - d.clear() - self.assert_events(["clear"]) - - def test_dealloc(self): - d = {"foo": "bar"} - with self.watcher() as wid: - self.watch(wid, d) - del d - self.assert_events(["dealloc"]) - - def test_unwatch(self): - d = {} - with self.watcher() as 
wid: - self.watch(wid, d) - d["foo"] = "bar" - self.unwatch(wid, d) - d["hmm"] = "baz" - self.assert_events(["new:foo:bar"]) - - def test_error(self): - d = {} - with self.watcher(kind=self.ERROR) as wid: - self.watch(wid, d) - with catch_unraisable_exception() as cm: - d["foo"] = "bar" - self.assertIs(cm.unraisable.object, d) - self.assertEqual(str(cm.unraisable.exc_value), "boom!") - self.assert_events([]) - - def test_two_watchers(self): - d1 = {} - d2 = {} - with self.watcher() as wid1: - with self.watcher(kind=self.SECOND) as wid2: - self.watch(wid1, d1) - self.watch(wid2, d2) - d1["foo"] = "bar" - d2["hmm"] = "baz" - self.assert_events(["new:foo:bar", "second"]) - - def test_watch_non_dict(self): - with self.watcher() as wid: - with self.assertRaisesRegex(ValueError, r"Cannot watch non-dictionary"): - self.watch(wid, 1) - - def test_watch_out_of_range_watcher_id(self): - d = {} - with self.assertRaisesRegex(ValueError, r"Invalid dict watcher ID -1"): - self.watch(-1, d) - with self.assertRaisesRegex(ValueError, r"Invalid dict watcher ID 8"): - self.watch(8, d) # DICT_MAX_WATCHERS = 8 - - def test_watch_unassigned_watcher_id(self): - d = {} - with self.assertRaisesRegex(ValueError, r"No dict watcher set for ID 1"): - self.watch(1, d) - - def test_unwatch_non_dict(self): - with self.watcher() as wid: - with self.assertRaisesRegex(ValueError, r"Cannot watch non-dictionary"): - self.unwatch(wid, 1) - - def test_unwatch_out_of_range_watcher_id(self): - d = {} - with self.assertRaisesRegex(ValueError, r"Invalid dict watcher ID -1"): - self.unwatch(-1, d) - with self.assertRaisesRegex(ValueError, r"Invalid dict watcher ID 8"): - self.unwatch(8, d) # DICT_MAX_WATCHERS = 8 - - def test_unwatch_unassigned_watcher_id(self): - d = {} - with self.assertRaisesRegex(ValueError, r"No dict watcher set for ID 1"): - self.unwatch(1, d) - - def test_clear_out_of_range_watcher_id(self): - with self.assertRaisesRegex(ValueError, r"Invalid dict watcher ID -1"): - self.clear_watcher(-1) - with self.assertRaisesRegex(ValueError, r"Invalid dict watcher ID 8"): - self.clear_watcher(8) # DICT_MAX_WATCHERS = 8 - - def test_clear_unassigned_watcher_id(self): - with self.assertRaisesRegex(ValueError, r"No dict watcher set for ID 1"): - self.clear_watcher(1) - - -class TestTypeWatchers(unittest.TestCase): - # types of watchers testcapimodule can add: - TYPES = 0 # appends modified types to global event list - ERROR = 1 # unconditionally sets and signals a RuntimeException - WRAP = 2 # appends modified type wrapped in list to global event list - - # duplicating the C constant - TYPE_MAX_WATCHERS = 8 - - def add_watcher(self, kind=TYPES): - return _testcapi.add_type_watcher(kind) - - def clear_watcher(self, watcher_id): - _testcapi.clear_type_watcher(watcher_id) - - @contextmanager - def watcher(self, kind=TYPES): - wid = self.add_watcher(kind) - try: - yield wid - finally: - self.clear_watcher(wid) - - def assert_events(self, expected): - actual = _testcapi.get_type_modified_events() - self.assertEqual(actual, expected) - - def watch(self, wid, t): - _testcapi.watch_type(wid, t) - - def unwatch(self, wid, t): - _testcapi.unwatch_type(wid, t) - - def test_watch_type(self): - class C: pass - with self.watcher() as wid: - self.watch(wid, C) - C.foo = "bar" - self.assert_events([C]) - - def test_event_aggregation(self): - class C: pass - with self.watcher() as wid: - self.watch(wid, C) - C.foo = "bar" - C.bar = "baz" - # only one event registered for both modifications - self.assert_events([C]) - - def 
test_lookup_resets_aggregation(self): - class C: pass - with self.watcher() as wid: - self.watch(wid, C) - C.foo = "bar" - # lookup resets type version tag - self.assertEqual(C.foo, "bar") - C.bar = "baz" - # both events registered - self.assert_events([C, C]) - - def test_unwatch_type(self): - class C: pass - with self.watcher() as wid: - self.watch(wid, C) - C.foo = "bar" - self.assertEqual(C.foo, "bar") - self.assert_events([C]) - self.unwatch(wid, C) - C.bar = "baz" - self.assert_events([C]) - - def test_clear_watcher(self): - class C: pass - # outer watcher is unused, it's just to keep events list alive - with self.watcher() as _: - with self.watcher() as wid: - self.watch(wid, C) - C.foo = "bar" - self.assertEqual(C.foo, "bar") - self.assert_events([C]) - C.bar = "baz" - # Watcher on C has been cleared, no new event - self.assert_events([C]) - - def test_watch_type_subclass(self): - class C: pass - class D(C): pass - with self.watcher() as wid: - self.watch(wid, D) - C.foo = "bar" - self.assert_events([D]) - - def test_error(self): - class C: pass - with self.watcher(kind=self.ERROR) as wid: - self.watch(wid, C) - with catch_unraisable_exception() as cm: - C.foo = "bar" - self.assertIs(cm.unraisable.object, C) - self.assertEqual(str(cm.unraisable.exc_value), "boom!") - self.assert_events([]) - - def test_two_watchers(self): - class C1: pass - class C2: pass - with self.watcher() as wid1: - with self.watcher(kind=self.WRAP) as wid2: - self.assertNotEqual(wid1, wid2) - self.watch(wid1, C1) - self.watch(wid2, C2) - C1.foo = "bar" - C2.hmm = "baz" - self.assert_events([C1, [C2]]) - - def test_watch_non_type(self): - with self.watcher() as wid: - with self.assertRaisesRegex(ValueError, r"Cannot watch non-type"): - self.watch(wid, 1) - - def test_watch_out_of_range_watcher_id(self): - class C: pass - with self.assertRaisesRegex(ValueError, r"Invalid type watcher ID -1"): - self.watch(-1, C) - with self.assertRaisesRegex(ValueError, r"Invalid type watcher ID 8"): - self.watch(self.TYPE_MAX_WATCHERS, C) - - def test_watch_unassigned_watcher_id(self): - class C: pass - with self.assertRaisesRegex(ValueError, r"No type watcher set for ID 1"): - self.watch(1, C) - - def test_unwatch_non_type(self): - with self.watcher() as wid: - with self.assertRaisesRegex(ValueError, r"Cannot watch non-type"): - self.unwatch(wid, 1) - - def test_unwatch_out_of_range_watcher_id(self): - class C: pass - with self.assertRaisesRegex(ValueError, r"Invalid type watcher ID -1"): - self.unwatch(-1, C) - with self.assertRaisesRegex(ValueError, r"Invalid type watcher ID 8"): - self.unwatch(self.TYPE_MAX_WATCHERS, C) - - def test_unwatch_unassigned_watcher_id(self): - class C: pass - with self.assertRaisesRegex(ValueError, r"No type watcher set for ID 1"): - self.unwatch(1, C) - - def test_clear_out_of_range_watcher_id(self): - with self.assertRaisesRegex(ValueError, r"Invalid type watcher ID -1"): - self.clear_watcher(-1) - with self.assertRaisesRegex(ValueError, r"Invalid type watcher ID 8"): - self.clear_watcher(self.TYPE_MAX_WATCHERS) - - def test_clear_unassigned_watcher_id(self): - with self.assertRaisesRegex(ValueError, r"No type watcher set for ID 1"): - self.clear_watcher(1) - - def test_no_more_ids_available(self): - contexts = [self.watcher() for i in range(self.TYPE_MAX_WATCHERS)] - with ExitStack() as stack: - for ctx in contexts: - stack.enter_context(ctx) - with self.assertRaisesRegex(RuntimeError, r"no more type watcher IDs"): - self.add_watcher() - - if __name__ == "__main__": unittest.main() diff --git 
a/Lib/test/test_structmembers.py b/Lib/test/test_capi/test_structmembers.py similarity index 66% rename from Lib/test/test_structmembers.py rename to Lib/test/test_capi/test_structmembers.py index 07d2f623f7156e..2cf46b203478dc 100644 --- a/Lib/test/test_structmembers.py +++ b/Lib/test/test_capi/test_structmembers.py @@ -4,32 +4,42 @@ # Skip this test if the _testcapi module isn't available. import_helper.import_module('_testcapi') -from _testcapi import _test_structmembersType, \ - CHAR_MAX, CHAR_MIN, UCHAR_MAX, \ - SHRT_MAX, SHRT_MIN, USHRT_MAX, \ - INT_MAX, INT_MIN, UINT_MAX, \ - LONG_MAX, LONG_MIN, ULONG_MAX, \ - LLONG_MAX, LLONG_MIN, ULLONG_MAX, \ - PY_SSIZE_T_MAX, PY_SSIZE_T_MIN - -ts=_test_structmembersType(False, # T_BOOL - 1, # T_BYTE - 2, # T_UBYTE - 3, # T_SHORT - 4, # T_USHORT - 5, # T_INT - 6, # T_UINT - 7, # T_LONG - 8, # T_ULONG - 23, # T_PYSSIZET - 9.99999,# T_FLOAT - 10.1010101010, # T_DOUBLE - "hi" # T_STRING_INPLACE - ) - -class ReadWriteTests(unittest.TestCase): +from _testcapi import (_test_structmembersType_OldAPI, + _test_structmembersType_NewAPI, + CHAR_MAX, CHAR_MIN, UCHAR_MAX, + SHRT_MAX, SHRT_MIN, USHRT_MAX, + INT_MAX, INT_MIN, UINT_MAX, + LONG_MAX, LONG_MIN, ULONG_MAX, + LLONG_MAX, LLONG_MIN, ULLONG_MAX, + PY_SSIZE_T_MAX, PY_SSIZE_T_MIN, + ) + +# There are two classes: one using <structmember.h> and another using +# `Py_`-prefixed API. They should behave the same in Python + +def _make_test_object(cls): + return cls(False, # T_BOOL + 1, # T_BYTE + 2, # T_UBYTE + 3, # T_SHORT + 4, # T_USHORT + 5, # T_INT + 6, # T_UINT + 7, # T_LONG + 8, # T_ULONG + 23, # T_PYSSIZET + 9.99999,# T_FLOAT + 10.1010101010, # T_DOUBLE + "hi", # T_STRING_INPLACE + ) + + +class ReadWriteTests: + def setUp(self): + self.ts = _make_test_object(self.cls) def test_bool(self): + ts = self.ts ts.T_BOOL = True self.assertEqual(ts.T_BOOL, True) ts.T_BOOL = False @@ -37,6 +47,7 @@ def test_bool(self): self.assertRaises(TypeError, setattr, ts, 'T_BOOL', 1) def test_byte(self): + ts = self.ts ts.T_BYTE = CHAR_MAX self.assertEqual(ts.T_BYTE, CHAR_MAX) ts.T_BYTE = CHAR_MIN @@ -45,6 +56,7 @@ def test_byte(self): self.assertEqual(ts.T_UBYTE, UCHAR_MAX) def test_short(self): + ts = self.ts ts.T_SHORT = SHRT_MAX self.assertEqual(ts.T_SHORT, SHRT_MAX) ts.T_SHORT = SHRT_MIN @@ -53,6 +65,7 @@ def test_short(self): self.assertEqual(ts.T_USHORT, USHRT_MAX) def test_int(self): + ts = self.ts ts.T_INT = INT_MAX self.assertEqual(ts.T_INT, INT_MAX) ts.T_INT = INT_MIN @@ -61,6 +74,7 @@ def test_int(self): self.assertEqual(ts.T_UINT, UINT_MAX) def test_long(self): + ts = self.ts ts.T_LONG = LONG_MAX self.assertEqual(ts.T_LONG, LONG_MAX) ts.T_LONG = LONG_MIN @@ -69,13 +83,17 @@ def test_long(self): self.assertEqual(ts.T_ULONG, ULONG_MAX) def test_py_ssize_t(self): + ts = self.ts ts.T_PYSSIZET = PY_SSIZE_T_MAX self.assertEqual(ts.T_PYSSIZET, PY_SSIZE_T_MAX) ts.T_PYSSIZET = PY_SSIZE_T_MIN self.assertEqual(ts.T_PYSSIZET, PY_SSIZE_T_MIN) - @unittest.skipUnless(hasattr(ts, "T_LONGLONG"), "long long not present") def test_longlong(self): + ts = self.ts + if not hasattr(ts, "T_LONGLONG"): + self.skipTest("long long not present") + ts.T_LONGLONG = LLONG_MAX self.assertEqual(ts.T_LONGLONG, LLONG_MAX) ts.T_LONGLONG = LLONG_MIN @@ -91,6 +109,7 @@ def test_longlong(self): self.assertEqual(ts.T_ULONGLONG, 4) def test_bad_assignments(self): + ts = self.ts integer_attributes = [ 'T_BOOL', 'T_BYTE', 'T_UBYTE', @@ -109,37 +128,57 @@ def test_bad_assignments(self): self.assertRaises(TypeError, setattr, ts, attr, nonint) def 
test_inplace_string(self): + ts = self.ts self.assertEqual(ts.T_STRING_INPLACE, "hi") self.assertRaises(TypeError, setattr, ts, "T_STRING_INPLACE", "s") self.assertRaises(TypeError, delattr, ts, "T_STRING_INPLACE") +class ReadWriteTests_OldAPI(ReadWriteTests, unittest.TestCase): + cls = _test_structmembersType_OldAPI + +class ReadWriteTests_NewAPI(ReadWriteTests, unittest.TestCase): + cls = _test_structmembersType_NewAPI -class TestWarnings(unittest.TestCase): +class TestWarnings: + def setUp(self): + self.ts = _make_test_object(self.cls) def test_byte_max(self): + ts = self.ts with warnings_helper.check_warnings(('', RuntimeWarning)): ts.T_BYTE = CHAR_MAX+1 def test_byte_min(self): + ts = self.ts with warnings_helper.check_warnings(('', RuntimeWarning)): ts.T_BYTE = CHAR_MIN-1 def test_ubyte_max(self): + ts = self.ts with warnings_helper.check_warnings(('', RuntimeWarning)): ts.T_UBYTE = UCHAR_MAX+1 def test_short_max(self): + ts = self.ts with warnings_helper.check_warnings(('', RuntimeWarning)): ts.T_SHORT = SHRT_MAX+1 def test_short_min(self): + ts = self.ts with warnings_helper.check_warnings(('', RuntimeWarning)): ts.T_SHORT = SHRT_MIN-1 def test_ushort_max(self): + ts = self.ts with warnings_helper.check_warnings(('', RuntimeWarning)): ts.T_USHORT = USHRT_MAX+1 +class TestWarnings_OldAPI(TestWarnings, unittest.TestCase): + cls = _test_structmembersType_OldAPI + +class TestWarnings_NewAPI(TestWarnings, unittest.TestCase): + cls = _test_structmembersType_NewAPI + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_capi/test_unicode.py b/Lib/test/test_capi/test_unicode.py new file mode 100644 index 00000000000000..857579f758386f --- /dev/null +++ b/Lib/test/test_capi/test_unicode.py @@ -0,0 +1,918 @@ +import unittest +import sys +from test import support +from test.support import import_helper + +try: + import _testcapi +except ImportError: + _testcapi = None + + +NULL = None + +class Str(str): + pass + + +class CAPITest(unittest.TestCase): + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_fromobject(self): + """Test PyUnicode_FromObject()""" + from _testcapi import unicode_fromobject as fromobject + + for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600', + 'a\ud800b\udfffc', '\ud834\udd1e']: + self.assertEqual(fromobject(s), s) + o = Str(s) + s2 = fromobject(o) + self.assertEqual(s2, s) + self.assertIs(type(s2), str) + self.assertIsNot(s2, s) + + self.assertRaises(TypeError, fromobject, b'abc') + self.assertRaises(TypeError, fromobject, []) + # CRASHES fromobject(NULL) + + def test_from_format(self): + """Test PyUnicode_FromFormat()""" + import_helper.import_module('ctypes') + from ctypes import ( + c_char_p, + pythonapi, py_object, sizeof, + c_int, c_long, c_longlong, c_ssize_t, + c_uint, c_ulong, c_ulonglong, c_size_t, c_void_p) + name = "PyUnicode_FromFormat" + _PyUnicode_FromFormat = getattr(pythonapi, name) + _PyUnicode_FromFormat.argtypes = (c_char_p,) + _PyUnicode_FromFormat.restype = py_object + + def PyUnicode_FromFormat(format, *args): + cargs = tuple( + py_object(arg) if isinstance(arg, str) else arg + for arg in args) + return _PyUnicode_FromFormat(format, *cargs) + + def check_format(expected, format, *args): + text = PyUnicode_FromFormat(format, *args) + self.assertEqual(expected, text) + + # ascii format, non-ascii argument + check_format('ascii\x7f=unicode\xe9', + b'ascii\x7f=%U', 'unicode\xe9') + + # non-ascii format, ascii argument: ensure that PyUnicode_FromFormatV() + # raises an error + 
self.assertRaisesRegex(ValueError, + r'^PyUnicode_FromFormatV\(\) expects an ASCII-encoded format ' + 'string, got a non-ASCII byte: 0xe9$', + PyUnicode_FromFormat, b'unicode\xe9=%s', 'ascii') + + # test "%c" + check_format('\uabcd', + b'%c', c_int(0xabcd)) + check_format('\U0010ffff', + b'%c', c_int(0x10ffff)) + with self.assertRaises(OverflowError): + PyUnicode_FromFormat(b'%c', c_int(0x110000)) + # Issue #18183 + check_format('\U00010000\U00100000', + b'%c%c', c_int(0x10000), c_int(0x100000)) + + # test "%" + check_format('%', + b'%%') + check_format('%s', + b'%%s') + check_format('[%]', + b'[%%]') + check_format('%abc', + b'%%%s', b'abc') + + # truncated string + check_format('abc', + b'%.3s', b'abcdef') + check_format('abc[\ufffd', + b'%.5s', 'abc[\u20ac]'.encode('utf8')) + check_format("'\\u20acABC'", + b'%A', '\u20acABC') + check_format("'\\u20", + b'%.5A', '\u20acABCDEF') + check_format("'\u20acABC'", + b'%R', '\u20acABC') + check_format("'\u20acA", + b'%.3R', '\u20acABCDEF') + check_format('\u20acAB', + b'%.3S', '\u20acABCDEF') + check_format('\u20acAB', + b'%.3U', '\u20acABCDEF') + check_format('\u20acAB', + b'%.3V', '\u20acABCDEF', None) + check_format('abc[\ufffd', + b'%.5V', None, 'abc[\u20ac]'.encode('utf8')) + + # following tests comes from #7330 + # test width modifier and precision modifier with %S + check_format("repr= abc", + b'repr=%5S', 'abc') + check_format("repr=ab", + b'repr=%.2S', 'abc') + check_format("repr= ab", + b'repr=%5.2S', 'abc') + + # test width modifier and precision modifier with %R + check_format("repr= 'abc'", + b'repr=%8R', 'abc') + check_format("repr='ab", + b'repr=%.3R', 'abc') + check_format("repr= 'ab", + b'repr=%5.3R', 'abc') + + # test width modifier and precision modifier with %A + check_format("repr= 'abc'", + b'repr=%8A', 'abc') + check_format("repr='ab", + b'repr=%.3A', 'abc') + check_format("repr= 'ab", + b'repr=%5.3A', 'abc') + + # test width modifier and precision modifier with %s + check_format("repr= abc", + b'repr=%5s', b'abc') + check_format("repr=ab", + b'repr=%.2s', b'abc') + check_format("repr= ab", + b'repr=%5.2s', b'abc') + + # test width modifier and precision modifier with %U + check_format("repr= abc", + b'repr=%5U', 'abc') + check_format("repr=ab", + b'repr=%.2U', 'abc') + check_format("repr= ab", + b'repr=%5.2U', 'abc') + + # test width modifier and precision modifier with %V + check_format("repr= abc", + b'repr=%5V', 'abc', b'123') + check_format("repr=ab", + b'repr=%.2V', 'abc', b'123') + check_format("repr= ab", + b'repr=%5.2V', 'abc', b'123') + check_format("repr= 123", + b'repr=%5V', None, b'123') + check_format("repr=12", + b'repr=%.2V', None, b'123') + check_format("repr= 12", + b'repr=%5.2V', None, b'123') + + # test integer formats (%i, %d, %u) + check_format('010', + b'%03i', c_int(10)) + check_format('0010', + b'%0.4i', c_int(10)) + check_format('-123', + b'%i', c_int(-123)) + check_format('-123', + b'%li', c_long(-123)) + check_format('-123', + b'%lli', c_longlong(-123)) + check_format('-123', + b'%zi', c_ssize_t(-123)) + + check_format('-123', + b'%d', c_int(-123)) + check_format('-123', + b'%ld', c_long(-123)) + check_format('-123', + b'%lld', c_longlong(-123)) + check_format('-123', + b'%zd', c_ssize_t(-123)) + + check_format('123', + b'%u', c_uint(123)) + check_format('123', + b'%lu', c_ulong(123)) + check_format('123', + b'%llu', c_ulonglong(123)) + check_format('123', + b'%zu', c_size_t(123)) + + # test long output + min_longlong = -(2 ** (8 * sizeof(c_longlong) - 1)) + max_longlong = -min_longlong - 1 + 
check_format(str(min_longlong), + b'%lld', c_longlong(min_longlong)) + check_format(str(max_longlong), + b'%lld', c_longlong(max_longlong)) + max_ulonglong = 2 ** (8 * sizeof(c_ulonglong)) - 1 + check_format(str(max_ulonglong), + b'%llu', c_ulonglong(max_ulonglong)) + PyUnicode_FromFormat(b'%p', c_void_p(-1)) + + # test padding (width and/or precision) + check_format('123'.rjust(10, '0'), + b'%010i', c_int(123)) + check_format('123'.rjust(100), + b'%100i', c_int(123)) + check_format('123'.rjust(100, '0'), + b'%.100i', c_int(123)) + check_format('123'.rjust(80, '0').rjust(100), + b'%100.80i', c_int(123)) + + check_format('123'.rjust(10, '0'), + b'%010u', c_uint(123)) + check_format('123'.rjust(100), + b'%100u', c_uint(123)) + check_format('123'.rjust(100, '0'), + b'%.100u', c_uint(123)) + check_format('123'.rjust(80, '0').rjust(100), + b'%100.80u', c_uint(123)) + + check_format('123'.rjust(10, '0'), + b'%010x', c_int(0x123)) + check_format('123'.rjust(100), + b'%100x', c_int(0x123)) + check_format('123'.rjust(100, '0'), + b'%.100x', c_int(0x123)) + check_format('123'.rjust(80, '0').rjust(100), + b'%100.80x', c_int(0x123)) + + # test %A + check_format(r"%A:'abc\xe9\uabcd\U0010ffff'", + b'%%A:%A', 'abc\xe9\uabcd\U0010ffff') + + # test %V + check_format('repr=abc', + b'repr=%V', 'abc', b'xyz') + + # test %p + # We cannot test the exact result, + # because it returns a hex representation of a C pointer, + # which is going to be different each time. But, we can test the format. + p_format_regex = r'^0x[a-zA-Z0-9]{3,}$' + p_format1 = PyUnicode_FromFormat(b'%p', 'abc') + self.assertIsInstance(p_format1, str) + self.assertRegex(p_format1, p_format_regex) + + p_format2 = PyUnicode_FromFormat(b'%p %p', '123456', b'xyz') + self.assertIsInstance(p_format2, str) + self.assertRegex(p_format2, + r'0x[a-zA-Z0-9]{3,} 0x[a-zA-Z0-9]{3,}') + + # Extra args are ignored: + p_format3 = PyUnicode_FromFormat(b'%p', '123456', None, b'xyz') + self.assertIsInstance(p_format3, str) + self.assertRegex(p_format3, p_format_regex) + + # Test string decode from parameter of %s using utf-8. + # b'\xe4\xba\xba\xe6\xb0\x91' is utf-8 encoded byte sequence of + # '\u4eba\u6c11' + check_format('repr=\u4eba\u6c11', + b'repr=%V', None, b'\xe4\xba\xba\xe6\xb0\x91') + + #Test replace error handler. 
+ check_format('repr=abc\ufffd', + b'repr=%V', None, b'abc\xff') + + # Issue #33817: empty strings + check_format('', + b'') + check_format('', + b'%s', b'') + + # check for crashes + for fmt in (b'%', b'%0', b'%01', b'%.', b'%.1', + b'%0%s', b'%1%s', b'%.%s', b'%.1%s', b'%1abc', + b'%l', b'%ll', b'%z', b'%ls', b'%lls', b'%zs'): + with self.subTest(fmt=fmt): + self.assertRaisesRegex(SystemError, 'invalid format string', + PyUnicode_FromFormat, fmt, b'abc') + self.assertRaisesRegex(SystemError, 'invalid format string', + PyUnicode_FromFormat, b'%+i', c_int(10)) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_aswidechar(self): + """Test PyUnicode_AsWideChar()""" + from _testcapi import unicode_aswidechar + import_helper.import_module('ctypes') + from ctypes import c_wchar, sizeof + + wchar, size = unicode_aswidechar('abcdef', 2) + self.assertEqual(size, 2) + self.assertEqual(wchar, 'ab') + + wchar, size = unicode_aswidechar('abc', 3) + self.assertEqual(size, 3) + self.assertEqual(wchar, 'abc') + + wchar, size = unicode_aswidechar('abc', 4) + self.assertEqual(size, 3) + self.assertEqual(wchar, 'abc\0') + + wchar, size = unicode_aswidechar('abc', 10) + self.assertEqual(size, 3) + self.assertEqual(wchar, 'abc\0') + + wchar, size = unicode_aswidechar('abc\0def', 20) + self.assertEqual(size, 7) + self.assertEqual(wchar, 'abc\0def\0') + + nonbmp = chr(0x10ffff) + if sizeof(c_wchar) == 2: + buflen = 3 + nchar = 2 + else: # sizeof(c_wchar) == 4 + buflen = 2 + nchar = 1 + wchar, size = unicode_aswidechar(nonbmp, buflen) + self.assertEqual(size, nchar) + self.assertEqual(wchar, nonbmp + '\0') + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_aswidecharstring(self): + """Test PyUnicode_AsWideCharString()""" + from _testcapi import unicode_aswidecharstring + import_helper.import_module('ctypes') + from ctypes import c_wchar, sizeof + + wchar, size = unicode_aswidecharstring('abc') + self.assertEqual(size, 3) + self.assertEqual(wchar, 'abc\0') + + wchar, size = unicode_aswidecharstring('abc\0def') + self.assertEqual(size, 7) + self.assertEqual(wchar, 'abc\0def\0') + + nonbmp = chr(0x10ffff) + if sizeof(c_wchar) == 2: + nchar = 2 + else: # sizeof(c_wchar) == 4 + nchar = 1 + wchar, size = unicode_aswidecharstring(nonbmp) + self.assertEqual(size, nchar) + self.assertEqual(wchar, nonbmp + '\0') + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_asucs4(self): + """Test PyUnicode_AsUCS4()""" + from _testcapi import unicode_asucs4 + for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600', + 'a\ud800b\udfffc', '\ud834\udd1e']: + l = len(s) + self.assertEqual(unicode_asucs4(s, l, True), s+'\0') + self.assertEqual(unicode_asucs4(s, l, False), s+'\uffff') + self.assertEqual(unicode_asucs4(s, l+1, True), s+'\0\uffff') + self.assertEqual(unicode_asucs4(s, l+1, False), s+'\0\uffff') + self.assertRaises(SystemError, unicode_asucs4, s, l-1, True) + self.assertRaises(SystemError, unicode_asucs4, s, l-2, False) + s = '\0'.join([s, s]) + self.assertEqual(unicode_asucs4(s, len(s), True), s+'\0') + self.assertEqual(unicode_asucs4(s, len(s), False), s+'\uffff') + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_asutf8(self): + """Test PyUnicode_AsUTF8()""" + from _testcapi import unicode_asutf8 + + bmp = '\u0100' + bmp2 = '\uffff' + nonbmp = chr(0x10ffff) + + self.assertEqual(unicode_asutf8(bmp), b'\xc4\x80') + 
self.assertEqual(unicode_asutf8(bmp2), b'\xef\xbf\xbf') + self.assertEqual(unicode_asutf8(nonbmp), b'\xf4\x8f\xbf\xbf') + self.assertRaises(UnicodeEncodeError, unicode_asutf8, 'a\ud800b\udfffc') + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_asutf8andsize(self): + """Test PyUnicode_AsUTF8AndSize()""" + from _testcapi import unicode_asutf8andsize + + bmp = '\u0100' + bmp2 = '\uffff' + nonbmp = chr(0x10ffff) + + self.assertEqual(unicode_asutf8andsize(bmp), (b'\xc4\x80', 2)) + self.assertEqual(unicode_asutf8andsize(bmp2), (b'\xef\xbf\xbf', 3)) + self.assertEqual(unicode_asutf8andsize(nonbmp), (b'\xf4\x8f\xbf\xbf', 4)) + self.assertRaises(UnicodeEncodeError, unicode_asutf8andsize, 'a\ud800b\udfffc') + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_concat(self): + """Test PyUnicode_Concat()""" + from _testcapi import unicode_concat as concat + + self.assertEqual(concat('abc', 'def'), 'abcdef') + self.assertEqual(concat('abc', 'где'), 'abcгде') + self.assertEqual(concat('абв', 'def'), 'абвdef') + self.assertEqual(concat('абв', 'где'), 'абвгде') + self.assertEqual(concat('a\0b', 'c\0d'), 'a\0bc\0d') + + self.assertRaises(TypeError, concat, b'abc', 'def') + self.assertRaises(TypeError, concat, 'abc', b'def') + self.assertRaises(TypeError, concat, b'abc', b'def') + self.assertRaises(TypeError, concat, [], 'def') + self.assertRaises(TypeError, concat, 'abc', []) + self.assertRaises(TypeError, concat, [], []) + # CRASHES concat(NULL, 'def') + # CRASHES concat('abc', NULL) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_split(self): + """Test PyUnicode_Split()""" + from _testcapi import unicode_split as split + + self.assertEqual(split('a|b|c|d', '|'), ['a', 'b', 'c', 'd']) + self.assertEqual(split('a|b|c|d', '|', 2), ['a', 'b', 'c|d']) + self.assertEqual(split('a|b|c|d', '\u20ac'), ['a|b|c|d']) + self.assertEqual(split('a||b|c||d', '||'), ['a', 'b|c', 'd']) + self.assertEqual(split('а|б|в|г', '|'), ['а', 'б', 'в', 'г']) + self.assertEqual(split('абабагаламага', 'а'), + ['', 'б', 'б', 'г', 'л', 'м', 'г', '']) + self.assertEqual(split(' a\tb\nc\rd\ve\f', NULL), + ['a', 'b', 'c', 'd', 'e']) + self.assertEqual(split('a\x85b\xa0c\u1680d\u2000e', NULL), + ['a', 'b', 'c', 'd', 'e']) + + self.assertRaises(ValueError, split, 'a|b|c|d', '') + self.assertRaises(TypeError, split, 'a|b|c|d', ord('|')) + self.assertRaises(TypeError, split, [], '|') + # CRASHES split(NULL, '|') + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_rsplit(self): + """Test PyUnicode_RSplit()""" + from _testcapi import unicode_rsplit as rsplit + + self.assertEqual(rsplit('a|b|c|d', '|'), ['a', 'b', 'c', 'd']) + self.assertEqual(rsplit('a|b|c|d', '|', 2), ['a|b', 'c', 'd']) + self.assertEqual(rsplit('a|b|c|d', '\u20ac'), ['a|b|c|d']) + self.assertEqual(rsplit('a||b|c||d', '||'), ['a', 'b|c', 'd']) + self.assertEqual(rsplit('а|б|в|г', '|'), ['а', 'б', 'в', 'г']) + self.assertEqual(rsplit('абабагаламага', 'а'), + ['', 'б', 'б', 'г', 'л', 'м', 'г', '']) + self.assertEqual(rsplit('aжbжcжd', 'ж'), ['a', 'b', 'c', 'd']) + self.assertEqual(rsplit(' a\tb\nc\rd\ve\f', NULL), + ['a', 'b', 'c', 'd', 'e']) + self.assertEqual(rsplit('a\x85b\xa0c\u1680d\u2000e', NULL), + ['a', 'b', 'c', 'd', 'e']) + + self.assertRaises(ValueError, rsplit, 'a|b|c|d', '') + self.assertRaises(TypeError, rsplit, 'a|b|c|d', ord('|')) + self.assertRaises(TypeError, rsplit, 
[], '|') + # CRASHES rsplit(NULL, '|') + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_partition(self): + """Test PyUnicode_Partition()""" + from _testcapi import unicode_partition as partition + + self.assertEqual(partition('a|b|c', '|'), ('a', '|', 'b|c')) + self.assertEqual(partition('a||b||c', '||'), ('a', '||', 'b||c')) + self.assertEqual(partition('а|б|в', '|'), ('а', '|', 'б|в')) + self.assertEqual(partition('кабан', 'а'), ('к', 'а', 'бан')) + self.assertEqual(partition('aжbжc', 'ж'), ('a', 'ж', 'bжc')) + + self.assertRaises(ValueError, partition, 'a|b|c', '') + self.assertRaises(TypeError, partition, b'a|b|c', '|') + self.assertRaises(TypeError, partition, 'a|b|c', b'|') + self.assertRaises(TypeError, partition, 'a|b|c', ord('|')) + self.assertRaises(TypeError, partition, [], '|') + # CRASHES partition(NULL, '|') + # CRASHES partition('a|b|c', NULL) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_rpartition(self): + """Test PyUnicode_RPartition()""" + from _testcapi import unicode_rpartition as rpartition + + self.assertEqual(rpartition('a|b|c', '|'), ('a|b', '|', 'c')) + self.assertEqual(rpartition('a||b||c', '||'), ('a||b', '||', 'c')) + self.assertEqual(rpartition('а|б|в', '|'), ('а|б', '|', 'в')) + self.assertEqual(rpartition('кабан', 'а'), ('каб', 'а', 'н')) + self.assertEqual(rpartition('aжbжc', 'ж'), ('aжb', 'ж', 'c')) + + self.assertRaises(ValueError, rpartition, 'a|b|c', '') + self.assertRaises(TypeError, rpartition, b'a|b|c', '|') + self.assertRaises(TypeError, rpartition, 'a|b|c', b'|') + self.assertRaises(TypeError, rpartition, 'a|b|c', ord('|')) + self.assertRaises(TypeError, rpartition, [], '|') + # CRASHES rpartition(NULL, '|') + # CRASHES rpartition('a|b|c', NULL) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_splitlines(self): + """Test PyUnicode_SplitLines()""" + from _testcapi import unicode_splitlines as splitlines + + self.assertEqual(splitlines('a\nb\rc\r\nd'), ['a', 'b', 'c', 'd']) + self.assertEqual(splitlines('a\nb\rc\r\nd', True), + ['a\n', 'b\r', 'c\r\n', 'd']) + self.assertEqual(splitlines('a\x85b\u2028c\u2029d'), + ['a', 'b', 'c', 'd']) + self.assertEqual(splitlines('a\x85b\u2028c\u2029d', True), + ['a\x85', 'b\u2028', 'c\u2029', 'd']) + self.assertEqual(splitlines('а\nб\rв\r\nг'), ['а', 'б', 'в', 'г']) + + self.assertRaises(TypeError, splitlines, b'a\nb\rc\r\nd') + # CRASHES splitlines(NULL) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_translate(self): + """Test PyUnicode_Translate()""" + from _testcapi import unicode_translate as translate + + self.assertEqual(translate('abcd', {ord('a'): 'A', ord('b'): ord('B'), ord('c'): '<>'}), 'AB<>d') + self.assertEqual(translate('абвг', {ord('а'): 'А', ord('б'): ord('Б'), ord('в'): '<>'}), 'АБ<>г') + self.assertEqual(translate('abc', {}), 'abc') + self.assertEqual(translate('abc', []), 'abc') + self.assertRaises(UnicodeTranslateError, translate, 'abc', {ord('b'): None}) + self.assertRaises(UnicodeTranslateError, translate, 'abc', {ord('b'): None}, 'strict') + self.assertRaises(LookupError, translate, 'abc', {ord('b'): None}, 'foo') + self.assertEqual(translate('abc', {ord('b'): None}, 'ignore'), 'ac') + self.assertEqual(translate('abc', {ord('b'): None}, 'replace'), 'a\ufffdc') + self.assertEqual(translate('abc', {ord('b'): None}, 'backslashreplace'), r'a\x62c') + # XXX Other error handlers do not 
support UnicodeTranslateError + self.assertRaises(TypeError, translate, b'abc', []) + self.assertRaises(TypeError, translate, 123, []) + self.assertRaises(TypeError, translate, 'abc', {ord('a'): b'A'}) + self.assertRaises(TypeError, translate, 'abc', 123) + self.assertRaises(TypeError, translate, 'abc', NULL) + self.assertRaises(LookupError, translate, 'abc', {ord('b'): None}, 'foo') + # CRASHES translate(NULL, []) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_join(self): + """Test PyUnicode_Join()""" + from _testcapi import unicode_join as join + self.assertEqual(join('|', ['a', 'b', 'c']), 'a|b|c') + self.assertEqual(join('|', ['a', '', 'c']), 'a||c') + self.assertEqual(join('', ['a', 'b', 'c']), 'abc') + self.assertEqual(join(NULL, ['a', 'b', 'c']), 'a b c') + self.assertEqual(join('|', ['а', 'б', 'в']), 'а|б|в') + self.assertEqual(join('ж', ['а', 'б', 'в']), 'ажбжв') + self.assertRaises(TypeError, join, b'|', ['a', 'b', 'c']) + self.assertRaises(TypeError, join, '|', [b'a', b'b', b'c']) + self.assertRaises(TypeError, join, NULL, [b'a', b'b', b'c']) + self.assertRaises(TypeError, join, '|', b'123') + self.assertRaises(TypeError, join, '|', 123) + self.assertRaises(SystemError, join, '|', NULL) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_count(self): + """Test PyUnicode_Count()""" + from _testcapi import unicode_count + + for str in "\xa1", "\u8000\u8080", "\ud800\udc02", "\U0001f100\U0001f1f1": + for i, ch in enumerate(str): + self.assertEqual(unicode_count(str, ch, 0, len(str)), 1) + + str = "!>_<!" + self.assertEqual(unicode_count(str, 'z', 0, len(str)), 0) + self.assertEqual(unicode_count(str, '', 0, len(str)), len(str)+1) + # start < end + self.assertEqual(unicode_count(str, '!', 1, len(str)+1), 1) + # start >= end + self.assertEqual(unicode_count(str, '!', 0, 0), 0) + self.assertEqual(unicode_count(str, '!', len(str), 0), 0) + # negative + self.assertEqual(unicode_count(str, '!', -len(str), -1), 1) + # bad arguments + self.assertRaises(TypeError, unicode_count, str, b'!', 0, len(str)) + self.assertRaises(TypeError, unicode_count, b"!>_<!", '!', 0, len(str)) + self.assertRaises(TypeError, unicode_count, str, ord('!'), 0, len(str)) + self.assertRaises(TypeError, unicode_count, [], '!', 0, len(str), 1) + # CRASHES unicode_count(NULL, '!', 0, len(str)) + # CRASHES unicode_count(str, NULL, 0, len(str)) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_tailmatch(self): + """Test PyUnicode_Tailmatch()""" + from _testcapi import unicode_tailmatch as tailmatch + + str = 'ababahalamaha' + self.assertEqual(tailmatch(str, 'aba', 0, len(str), -1), 1) + self.assertEqual(tailmatch(str, 'aha', 0, len(str), 1), 1) + + self.assertEqual(tailmatch(str, 'aba', 0, sys.maxsize, -1), 1) + self.assertEqual(tailmatch(str, 'aba', -len(str), sys.maxsize, -1), 1) + self.assertEqual(tailmatch(str, 'aba', -sys.maxsize-1, len(str), -1), 1) + self.assertEqual(tailmatch(str, 'aha', 0, sys.maxsize, 1), 1) + self.assertEqual(tailmatch(str, 'aha', -sys.maxsize-1, len(str), 1), 1) + + self.assertEqual(tailmatch(str, 'z', 0, len(str), 1), 0) + self.assertEqual(tailmatch(str, 'z', 0, len(str), -1), 0) + self.assertEqual(tailmatch(str, '', 0, len(str), 1), 1) + self.assertEqual(tailmatch(str, '', 0, len(str), -1), 1) + + self.assertEqual(tailmatch(str, 'ba', 0, len(str)-1, -1), 0) + self.assertEqual(tailmatch(str, 'ba', 1, len(str)-1, -1), 1) + 
self.assertEqual(tailmatch(str, 'aba', 1, len(str)-1, -1), 0) + self.assertEqual(tailmatch(str, 'ba', -len(str)+1, -1, -1), 1) + self.assertEqual(tailmatch(str, 'ah', 0, len(str), 1), 0) + self.assertEqual(tailmatch(str, 'ah', 0, len(str)-1, 1), 1) + self.assertEqual(tailmatch(str, 'ah', -len(str), -1, 1), 1) + + # bad arguments + self.assertRaises(TypeError, tailmatch, str, ('aba', 'aha'), 0, len(str), -1) + self.assertRaises(TypeError, tailmatch, str, ('aba', 'aha'), 0, len(str), 1) + # CRASHES tailmatch(NULL, 'aba', 0, len(str), -1) + # CRASHES tailmatch(str, NULL, 0, len(str), -1) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_find(self): + """Test PyUnicode_Find()""" + from _testcapi import unicode_find as find + + for str in "\xa1", "\u8000\u8080", "\ud800\udc02", "\U0001f100\U0001f1f1": + for i, ch in enumerate(str): + self.assertEqual(find(str, ch, 0, len(str), 1), i) + self.assertEqual(find(str, ch, 0, len(str), -1), i) + + str = "!>_<!" + self.assertEqual(find(str, 'z', 0, len(str), 1), -1) + self.assertEqual(find(str, 'z', 0, len(str), -1), -1) + self.assertEqual(find(str, '', 0, len(str), 1), 0) + self.assertEqual(find(str, '', 0, len(str), -1), len(str)) + # start < end + self.assertEqual(find(str, '!', 1, len(str)+1, 1), 4) + self.assertEqual(find(str, '!', 1, len(str)+1, -1), 4) + # start >= end + self.assertEqual(find(str, '!', 0, 0, 1), -1) + self.assertEqual(find(str, '!', len(str), 0, 1), -1) + # negative + self.assertEqual(find(str, '!', -len(str), -1, 1), 0) + self.assertEqual(find(str, '!', -len(str), -1, -1), 0) + # bad arguments + self.assertRaises(TypeError, find, str, b'!', 0, len(str), 1) + self.assertRaises(TypeError, find, b"!>_<!", '!', 0, len(str), 1) + self.assertRaises(TypeError, find, str, ord('!'), 0, len(str), 1) + self.assertRaises(TypeError, find, [], '!', 0, len(str), 1) + # CRASHES find(NULL, '!', 0, len(str), 1) + # CRASHES find(str, NULL, 0, len(str), 1) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_findchar(self): + """Test PyUnicode_FindChar()""" + from _testcapi import unicode_findchar + + for str in "\xa1", "\u8000\u8080", "\ud800\udc02", "\U0001f100\U0001f1f1": + for i, ch in enumerate(str): + self.assertEqual(unicode_findchar(str, ord(ch), 0, len(str), 1), i) + self.assertEqual(unicode_findchar(str, ord(ch), 0, len(str), -1), i) + + str = "!>_<!" 
+ self.assertEqual(unicode_findchar(str, 0x110000, 0, len(str), 1), -1) + self.assertEqual(unicode_findchar(str, 0x110000, 0, len(str), -1), -1) + # start < end + self.assertEqual(unicode_findchar(str, ord('!'), 1, len(str)+1, 1), 4) + self.assertEqual(unicode_findchar(str, ord('!'), 1, len(str)+1, -1), 4) + # start >= end + self.assertEqual(unicode_findchar(str, ord('!'), 0, 0, 1), -1) + self.assertEqual(unicode_findchar(str, ord('!'), len(str), 0, 1), -1) + # negative + self.assertEqual(unicode_findchar(str, ord('!'), -len(str), -1, 1), 0) + self.assertEqual(unicode_findchar(str, ord('!'), -len(str), -1, -1), 0) + # bad arguments + # CRASHES unicode_findchar(b"!>_<!", ord('!'), 0, len(str), 1) + # CRASHES unicode_findchar([], ord('!'), 0, len(str), 1) + # CRASHES unicode_findchar(NULL, ord('!'), 0, len(str), 1), 1) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_replace(self): + """Test PyUnicode_Replace()""" + from _testcapi import unicode_replace as replace + + str = 'abracadabra' + self.assertEqual(replace(str, 'a', '='), '=br=c=d=br=') + self.assertEqual(replace(str, 'a', '<>'), '<>br<>c<>d<>br<>') + self.assertEqual(replace(str, 'abra', '='), '=cad=') + self.assertEqual(replace(str, 'a', '=', 2), '=br=cadabra') + self.assertEqual(replace(str, 'a', '=', 0), str) + self.assertEqual(replace(str, 'a', '=', sys.maxsize), '=br=c=d=br=') + self.assertEqual(replace(str, 'z', '='), str) + self.assertEqual(replace(str, '', '='), '=a=b=r=a=c=a=d=a=b=r=a=') + self.assertEqual(replace(str, 'a', 'ж'), 'жbrжcжdжbrж') + self.assertEqual(replace('абабагаламага', 'а', '='), '=б=б=г=л=м=г=') + self.assertEqual(replace('Баден-Баден', 'Баден', 'Baden'), 'Baden-Baden') + # bad arguments + self.assertRaises(TypeError, replace, 'a', 'a', b'=') + self.assertRaises(TypeError, replace, 'a', b'a', '=') + self.assertRaises(TypeError, replace, b'a', 'a', '=') + self.assertRaises(TypeError, replace, 'a', 'a', ord('=')) + self.assertRaises(TypeError, replace, 'a', ord('a'), '=') + self.assertRaises(TypeError, replace, [], 'a', '=') + # CRASHES replace('a', 'a', NULL) + # CRASHES replace('a', NULL, '=') + # CRASHES replace(NULL, 'a', '=') + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_compare(self): + """Test PyUnicode_Compare()""" + from _testcapi import unicode_compare as compare + + self.assertEqual(compare('abc', 'abc'), 0) + self.assertEqual(compare('abc', 'def'), -1) + self.assertEqual(compare('def', 'abc'), 1) + self.assertEqual(compare('abc', 'abc\0def'), -1) + self.assertEqual(compare('abc\0def', 'abc\0def'), 0) + self.assertEqual(compare('абв', 'abc'), 1) + + self.assertRaises(TypeError, compare, b'abc', 'abc') + self.assertRaises(TypeError, compare, 'abc', b'abc') + self.assertRaises(TypeError, compare, b'abc', b'abc') + self.assertRaises(TypeError, compare, [], 'abc') + self.assertRaises(TypeError, compare, 'abc', []) + self.assertRaises(TypeError, compare, [], []) + # CRASHES compare(NULL, 'abc') + # CRASHES compare('abc', NULL) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_comparewithasciistring(self): + """Test PyUnicode_CompareWithASCIIString()""" + from _testcapi import unicode_comparewithasciistring as comparewithasciistring + + self.assertEqual(comparewithasciistring('abc', b'abc'), 0) + self.assertEqual(comparewithasciistring('abc', b'def'), -1) + self.assertEqual(comparewithasciistring('def', b'abc'), 1) + 
self.assertEqual(comparewithasciistring('abc', b'abc\0def'), 0) + self.assertEqual(comparewithasciistring('abc\0def', b'abc\0def'), 1) + self.assertEqual(comparewithasciistring('абв', b'abc'), 1) + + # CRASHES comparewithasciistring(b'abc', b'abc') + # CRASHES comparewithasciistring([], b'abc') + # CRASHES comparewithasciistring(NULL, b'abc') + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_richcompare(self): + """Test PyUnicode_RichCompare()""" + from _testcapi import unicode_richcompare as richcompare + + LT, LE, EQ, NE, GT, GE = range(6) + strings = ('abc', 'абв', '\U0001f600', 'abc\0') + for s1 in strings: + for s2 in strings: + self.assertIs(richcompare(s1, s2, LT), s1 < s2) + self.assertIs(richcompare(s1, s2, LE), s1 <= s2) + self.assertIs(richcompare(s1, s2, EQ), s1 == s2) + self.assertIs(richcompare(s1, s2, NE), s1 != s2) + self.assertIs(richcompare(s1, s2, GT), s1 > s2) + self.assertIs(richcompare(s1, s2, GE), s1 >= s2) + + for op in LT, LE, EQ, NE, GT, GE: + self.assertIs(richcompare(b'abc', 'abc', op), NotImplemented) + self.assertIs(richcompare('abc', b'abc', op), NotImplemented) + self.assertIs(richcompare(b'abc', b'abc', op), NotImplemented) + self.assertIs(richcompare([], 'abc', op), NotImplemented) + self.assertIs(richcompare('abc', [], op), NotImplemented) + self.assertIs(richcompare([], [], op), NotImplemented) + + # CRASHES richcompare(NULL, 'abc', op) + # CRASHES richcompare('abc', NULL, op) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_format(self): + """Test PyUnicode_Format()""" + from _testcapi import unicode_format as format + + self.assertEqual(format('x=%d!', 42), 'x=42!') + self.assertEqual(format('x=%d!', (42,)), 'x=42!') + self.assertEqual(format('x=%d y=%s!', (42, [])), 'x=42 y=[]!') + + self.assertRaises(SystemError, format, 'x=%d!', NULL) + self.assertRaises(SystemError, format, NULL, 42) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_contains(self): + """Test PyUnicode_Contains()""" + from _testcapi import unicode_contains as contains + + self.assertEqual(contains('abcd', ''), 1) + self.assertEqual(contains('abcd', 'b'), 1) + self.assertEqual(contains('abcd', 'x'), 0) + self.assertEqual(contains('abcd', 'ж'), 0) + self.assertEqual(contains('abcd', '\0'), 0) + self.assertEqual(contains('abc\0def', '\0'), 1) + self.assertEqual(contains('abcd', 'bc'), 1) + + self.assertRaises(TypeError, contains, b'abcd', 'b') + self.assertRaises(TypeError, contains, 'abcd', b'b') + self.assertRaises(TypeError, contains, b'abcd', b'b') + self.assertRaises(TypeError, contains, [], 'b') + self.assertRaises(TypeError, contains, 'abcd', ord('b')) + # CRASHES contains(NULL, 'b') + # CRASHES contains('abcd', NULL) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_isidentifier(self): + """Test PyUnicode_IsIdentifier()""" + from _testcapi import unicode_isidentifier as isidentifier + + self.assertEqual(isidentifier("a"), 1) + self.assertEqual(isidentifier("b0"), 1) + self.assertEqual(isidentifier("µ"), 1) + self.assertEqual(isidentifier("𝔘𝔫𝔦𝔠𝔬𝔡𝔢"), 1) + + self.assertEqual(isidentifier(""), 0) + self.assertEqual(isidentifier(" "), 0) + self.assertEqual(isidentifier("["), 0) + self.assertEqual(isidentifier("©"), 0) + self.assertEqual(isidentifier("0"), 0) + self.assertEqual(isidentifier("32M"), 0) + + # CRASHES isidentifier(b"a") + # CRASHES isidentifier([]) + # CRASHES 
isidentifier(NULL) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_copycharacters(self): + """Test PyUnicode_CopyCharacters()""" + from _testcapi import unicode_copycharacters + + strings = [ + 'abcde', '\xa1\xa2\xa3\xa4\xa5', + '\u4f60\u597d\u4e16\u754c\uff01', + '\U0001f600\U0001f601\U0001f602\U0001f603\U0001f604' + ] + + for idx, from_ in enumerate(strings): + # wide -> narrow: exceed maxchar limitation + for to in strings[:idx]: + self.assertRaises( + SystemError, + unicode_copycharacters, to, 0, from_, 0, 5 + ) + # same kind + for from_start in range(5): + self.assertEqual( + unicode_copycharacters(from_, 0, from_, from_start, 5), + (from_[from_start:from_start+5].ljust(5, '\0'), + 5-from_start) + ) + for to_start in range(5): + self.assertEqual( + unicode_copycharacters(from_, to_start, from_, to_start, 5), + (from_[to_start:to_start+5].rjust(5, '\0'), + 5-to_start) + ) + # narrow -> wide + # Tests omitted since this creates invalid strings. + + s = strings[0] + self.assertRaises(IndexError, unicode_copycharacters, s, 6, s, 0, 5) + self.assertRaises(IndexError, unicode_copycharacters, s, -1, s, 0, 5) + self.assertRaises(IndexError, unicode_copycharacters, s, 0, s, 6, 5) + self.assertRaises(IndexError, unicode_copycharacters, s, 0, s, -1, 5) + self.assertRaises(SystemError, unicode_copycharacters, s, 1, s, 0, 5) + self.assertRaises(SystemError, unicode_copycharacters, s, 0, s, 0, -1) + self.assertRaises(SystemError, unicode_copycharacters, s, 0, b'', 0, 0) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_pep393_utf8_caching_bug(self): + # Issue #25709: Problem with string concatenation and utf-8 cache + from _testcapi import getargs_s_hash + for k in 0x24, 0xa4, 0x20ac, 0x1f40d: + s = '' + for i in range(5): + # Due to CPython specific optimization the 's' string can be + # resized in-place. + s += chr(k) + # Parsing with the "s#" format code calls indirectly + # PyUnicode_AsUTF8AndSize() which creates the UTF-8 + # encoded string cached in the Unicode object. + self.assertEqual(getargs_s_hash(s), chr(k).encode() * (i + 1)) + # Check that the second call returns the same result + self.assertEqual(getargs_s_hash(s), chr(k).encode() * (i + 1)) + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_capi/test_watchers.py b/Lib/test/test_capi/test_watchers.py new file mode 100644 index 00000000000000..1922614ef60558 --- /dev/null +++ b/Lib/test/test_capi/test_watchers.py @@ -0,0 +1,501 @@ +import unittest + +from contextlib import contextmanager, ExitStack +from test.support import catch_unraisable_exception, import_helper + + +# Skip this test if the _testcapi module isn't available. 
+_testcapi = import_helper.import_module('_testcapi') + + +class TestDictWatchers(unittest.TestCase): + # types of watchers testcapimodule can add: + EVENTS = 0 # appends dict events as strings to global event list + ERROR = 1 # unconditionally sets and signals a RuntimeException + SECOND = 2 # always appends "second" to global event list + + def add_watcher(self, kind=EVENTS): + return _testcapi.add_dict_watcher(kind) + + def clear_watcher(self, watcher_id): + _testcapi.clear_dict_watcher(watcher_id) + + @contextmanager + def watcher(self, kind=EVENTS): + wid = self.add_watcher(kind) + try: + yield wid + finally: + self.clear_watcher(wid) + + def assert_events(self, expected): + actual = _testcapi.get_dict_watcher_events() + self.assertEqual(actual, expected) + + def watch(self, wid, d): + _testcapi.watch_dict(wid, d) + + def unwatch(self, wid, d): + _testcapi.unwatch_dict(wid, d) + + def test_set_new_item(self): + d = {} + with self.watcher() as wid: + self.watch(wid, d) + d["foo"] = "bar" + self.assert_events(["new:foo:bar"]) + + def test_set_existing_item(self): + d = {"foo": "bar"} + with self.watcher() as wid: + self.watch(wid, d) + d["foo"] = "baz" + self.assert_events(["mod:foo:baz"]) + + def test_clone(self): + d = {} + d2 = {"foo": "bar"} + with self.watcher() as wid: + self.watch(wid, d) + d.update(d2) + self.assert_events(["clone"]) + + def test_no_event_if_not_watched(self): + d = {} + with self.watcher() as wid: + d["foo"] = "bar" + self.assert_events([]) + + def test_del(self): + d = {"foo": "bar"} + with self.watcher() as wid: + self.watch(wid, d) + del d["foo"] + self.assert_events(["del:foo"]) + + def test_pop(self): + d = {"foo": "bar"} + with self.watcher() as wid: + self.watch(wid, d) + d.pop("foo") + self.assert_events(["del:foo"]) + + def test_clear(self): + d = {"foo": "bar"} + with self.watcher() as wid: + self.watch(wid, d) + d.clear() + self.assert_events(["clear"]) + + def test_dealloc(self): + d = {"foo": "bar"} + with self.watcher() as wid: + self.watch(wid, d) + del d + self.assert_events(["dealloc"]) + + def test_unwatch(self): + d = {} + with self.watcher() as wid: + self.watch(wid, d) + d["foo"] = "bar" + self.unwatch(wid, d) + d["hmm"] = "baz" + self.assert_events(["new:foo:bar"]) + + def test_error(self): + d = {} + with self.watcher(kind=self.ERROR) as wid: + self.watch(wid, d) + with catch_unraisable_exception() as cm: + d["foo"] = "bar" + self.assertIs(cm.unraisable.object, d) + self.assertEqual(str(cm.unraisable.exc_value), "boom!") + self.assert_events([]) + + def test_two_watchers(self): + d1 = {} + d2 = {} + with self.watcher() as wid1: + with self.watcher(kind=self.SECOND) as wid2: + self.watch(wid1, d1) + self.watch(wid2, d2) + d1["foo"] = "bar" + d2["hmm"] = "baz" + self.assert_events(["new:foo:bar", "second"]) + + def test_watch_non_dict(self): + with self.watcher() as wid: + with self.assertRaisesRegex(ValueError, r"Cannot watch non-dictionary"): + self.watch(wid, 1) + + def test_watch_out_of_range_watcher_id(self): + d = {} + with self.assertRaisesRegex(ValueError, r"Invalid dict watcher ID -1"): + self.watch(-1, d) + with self.assertRaisesRegex(ValueError, r"Invalid dict watcher ID 8"): + self.watch(8, d) # DICT_MAX_WATCHERS = 8 + + def test_watch_unassigned_watcher_id(self): + d = {} + with self.assertRaisesRegex(ValueError, r"No dict watcher set for ID 1"): + self.watch(1, d) + + def test_unwatch_non_dict(self): + with self.watcher() as wid: + with self.assertRaisesRegex(ValueError, r"Cannot watch non-dictionary"): + self.unwatch(wid, 1) 
+ + def test_unwatch_out_of_range_watcher_id(self): + d = {} + with self.assertRaisesRegex(ValueError, r"Invalid dict watcher ID -1"): + self.unwatch(-1, d) + with self.assertRaisesRegex(ValueError, r"Invalid dict watcher ID 8"): + self.unwatch(8, d) # DICT_MAX_WATCHERS = 8 + + def test_unwatch_unassigned_watcher_id(self): + d = {} + with self.assertRaisesRegex(ValueError, r"No dict watcher set for ID 1"): + self.unwatch(1, d) + + def test_clear_out_of_range_watcher_id(self): + with self.assertRaisesRegex(ValueError, r"Invalid dict watcher ID -1"): + self.clear_watcher(-1) + with self.assertRaisesRegex(ValueError, r"Invalid dict watcher ID 8"): + self.clear_watcher(8) # DICT_MAX_WATCHERS = 8 + + def test_clear_unassigned_watcher_id(self): + with self.assertRaisesRegex(ValueError, r"No dict watcher set for ID 1"): + self.clear_watcher(1) + + +class TestTypeWatchers(unittest.TestCase): + # types of watchers testcapimodule can add: + TYPES = 0 # appends modified types to global event list + ERROR = 1 # unconditionally sets and signals a RuntimeException + WRAP = 2 # appends modified type wrapped in list to global event list + + # duplicating the C constant + TYPE_MAX_WATCHERS = 8 + + def add_watcher(self, kind=TYPES): + return _testcapi.add_type_watcher(kind) + + def clear_watcher(self, watcher_id): + _testcapi.clear_type_watcher(watcher_id) + + @contextmanager + def watcher(self, kind=TYPES): + wid = self.add_watcher(kind) + try: + yield wid + finally: + self.clear_watcher(wid) + + def assert_events(self, expected): + actual = _testcapi.get_type_modified_events() + self.assertEqual(actual, expected) + + def watch(self, wid, t): + _testcapi.watch_type(wid, t) + + def unwatch(self, wid, t): + _testcapi.unwatch_type(wid, t) + + def test_watch_type(self): + class C: pass + with self.watcher() as wid: + self.watch(wid, C) + C.foo = "bar" + self.assert_events([C]) + + def test_event_aggregation(self): + class C: pass + with self.watcher() as wid: + self.watch(wid, C) + C.foo = "bar" + C.bar = "baz" + # only one event registered for both modifications + self.assert_events([C]) + + def test_lookup_resets_aggregation(self): + class C: pass + with self.watcher() as wid: + self.watch(wid, C) + C.foo = "bar" + # lookup resets type version tag + self.assertEqual(C.foo, "bar") + C.bar = "baz" + # both events registered + self.assert_events([C, C]) + + def test_unwatch_type(self): + class C: pass + with self.watcher() as wid: + self.watch(wid, C) + C.foo = "bar" + self.assertEqual(C.foo, "bar") + self.assert_events([C]) + self.unwatch(wid, C) + C.bar = "baz" + self.assert_events([C]) + + def test_clear_watcher(self): + class C: pass + # outer watcher is unused, it's just to keep events list alive + with self.watcher() as _: + with self.watcher() as wid: + self.watch(wid, C) + C.foo = "bar" + self.assertEqual(C.foo, "bar") + self.assert_events([C]) + C.bar = "baz" + # Watcher on C has been cleared, no new event + self.assert_events([C]) + + def test_watch_type_subclass(self): + class C: pass + class D(C): pass + with self.watcher() as wid: + self.watch(wid, D) + C.foo = "bar" + self.assert_events([D]) + + def test_error(self): + class C: pass + with self.watcher(kind=self.ERROR) as wid: + self.watch(wid, C) + with catch_unraisable_exception() as cm: + C.foo = "bar" + self.assertIs(cm.unraisable.object, C) + self.assertEqual(str(cm.unraisable.exc_value), "boom!") + self.assert_events([]) + + def test_two_watchers(self): + class C1: pass + class C2: pass + with self.watcher() as wid1: + with 
self.watcher(kind=self.WRAP) as wid2: + self.assertNotEqual(wid1, wid2) + self.watch(wid1, C1) + self.watch(wid2, C2) + C1.foo = "bar" + C2.hmm = "baz" + self.assert_events([C1, [C2]]) + + def test_watch_non_type(self): + with self.watcher() as wid: + with self.assertRaisesRegex(ValueError, r"Cannot watch non-type"): + self.watch(wid, 1) + + def test_watch_out_of_range_watcher_id(self): + class C: pass + with self.assertRaisesRegex(ValueError, r"Invalid type watcher ID -1"): + self.watch(-1, C) + with self.assertRaisesRegex(ValueError, r"Invalid type watcher ID 8"): + self.watch(self.TYPE_MAX_WATCHERS, C) + + def test_watch_unassigned_watcher_id(self): + class C: pass + with self.assertRaisesRegex(ValueError, r"No type watcher set for ID 1"): + self.watch(1, C) + + def test_unwatch_non_type(self): + with self.watcher() as wid: + with self.assertRaisesRegex(ValueError, r"Cannot watch non-type"): + self.unwatch(wid, 1) + + def test_unwatch_out_of_range_watcher_id(self): + class C: pass + with self.assertRaisesRegex(ValueError, r"Invalid type watcher ID -1"): + self.unwatch(-1, C) + with self.assertRaisesRegex(ValueError, r"Invalid type watcher ID 8"): + self.unwatch(self.TYPE_MAX_WATCHERS, C) + + def test_unwatch_unassigned_watcher_id(self): + class C: pass + with self.assertRaisesRegex(ValueError, r"No type watcher set for ID 1"): + self.unwatch(1, C) + + def test_clear_out_of_range_watcher_id(self): + with self.assertRaisesRegex(ValueError, r"Invalid type watcher ID -1"): + self.clear_watcher(-1) + with self.assertRaisesRegex(ValueError, r"Invalid type watcher ID 8"): + self.clear_watcher(self.TYPE_MAX_WATCHERS) + + def test_clear_unassigned_watcher_id(self): + with self.assertRaisesRegex(ValueError, r"No type watcher set for ID 1"): + self.clear_watcher(1) + + def test_no_more_ids_available(self): + contexts = [self.watcher() for i in range(self.TYPE_MAX_WATCHERS)] + with ExitStack() as stack: + for ctx in contexts: + stack.enter_context(ctx) + with self.assertRaisesRegex(RuntimeError, r"no more type watcher IDs"): + self.add_watcher() + + +class TestCodeObjectWatchers(unittest.TestCase): + @contextmanager + def code_watcher(self, which_watcher): + wid = _testcapi.add_code_watcher(which_watcher) + try: + yield wid + finally: + _testcapi.clear_code_watcher(wid) + + def assert_event_counts(self, exp_created_0, exp_destroyed_0, + exp_created_1, exp_destroyed_1): + self.assertEqual( + exp_created_0, _testcapi.get_code_watcher_num_created_events(0)) + self.assertEqual( + exp_destroyed_0, _testcapi.get_code_watcher_num_destroyed_events(0)) + self.assertEqual( + exp_created_1, _testcapi.get_code_watcher_num_created_events(1)) + self.assertEqual( + exp_destroyed_1, _testcapi.get_code_watcher_num_destroyed_events(1)) + + def test_code_object_events_dispatched(self): + # verify that all counts are zero before any watchers are registered + self.assert_event_counts(0, 0, 0, 0) + + # verify that all counts remain zero when a code object is + # created and destroyed with no watchers registered + co1 = _testcapi.code_newempty("test_watchers", "dummy1", 0) + self.assert_event_counts(0, 0, 0, 0) + del co1 + self.assert_event_counts(0, 0, 0, 0) + + # verify counts are as expected when first watcher is registered + with self.code_watcher(0): + self.assert_event_counts(0, 0, 0, 0) + co2 = _testcapi.code_newempty("test_watchers", "dummy2", 0) + self.assert_event_counts(1, 0, 0, 0) + del co2 + self.assert_event_counts(1, 1, 0, 0) + + # again with second watcher registered + with self.code_watcher(1): + 
self.assert_event_counts(1, 1, 0, 0) + co3 = _testcapi.code_newempty("test_watchers", "dummy3", 0) + self.assert_event_counts(2, 1, 1, 0) + del co3 + self.assert_event_counts(2, 2, 1, 1) + + # verify counts are reset and don't change after both watchers are cleared + co4 = _testcapi.code_newempty("test_watchers", "dummy4", 0) + self.assert_event_counts(0, 0, 0, 0) + del co4 + self.assert_event_counts(0, 0, 0, 0) + + def test_clear_out_of_range_watcher_id(self): + with self.assertRaisesRegex(ValueError, r"Invalid code watcher ID -1"): + _testcapi.clear_code_watcher(-1) + with self.assertRaisesRegex(ValueError, r"Invalid code watcher ID 8"): + _testcapi.clear_code_watcher(8) # CODE_MAX_WATCHERS = 8 + + def test_clear_unassigned_watcher_id(self): + with self.assertRaisesRegex(ValueError, r"No code watcher set for ID 1"): + _testcapi.clear_code_watcher(1) + + def test_allocate_too_many_watchers(self): + with self.assertRaisesRegex(RuntimeError, r"no more code watcher IDs available"): + _testcapi.allocate_too_many_code_watchers() + + +class TestFuncWatchers(unittest.TestCase): + @contextmanager + def add_watcher(self, func): + wid = _testcapi.add_func_watcher(func) + try: + yield + finally: + _testcapi.clear_func_watcher(wid) + + def test_func_events_dispatched(self): + events = [] + def watcher(*args): + events.append(args) + + with self.add_watcher(watcher): + def myfunc(): + pass + self.assertIn((_testcapi.PYFUNC_EVENT_CREATE, myfunc, None), events) + myfunc_id = id(myfunc) + + new_code = self.test_func_events_dispatched.__code__ + myfunc.__code__ = new_code + self.assertIn((_testcapi.PYFUNC_EVENT_MODIFY_CODE, myfunc, new_code), events) + + new_defaults = (123,) + myfunc.__defaults__ = new_defaults + self.assertIn((_testcapi.PYFUNC_EVENT_MODIFY_DEFAULTS, myfunc, new_defaults), events) + + new_defaults = (456,) + _testcapi.set_func_defaults_via_capi(myfunc, new_defaults) + self.assertIn((_testcapi.PYFUNC_EVENT_MODIFY_DEFAULTS, myfunc, new_defaults), events) + + new_kwdefaults = {"self": 123} + myfunc.__kwdefaults__ = new_kwdefaults + self.assertIn((_testcapi.PYFUNC_EVENT_MODIFY_KWDEFAULTS, myfunc, new_kwdefaults), events) + + new_kwdefaults = {"self": 456} + _testcapi.set_func_kwdefaults_via_capi(myfunc, new_kwdefaults) + self.assertIn((_testcapi.PYFUNC_EVENT_MODIFY_KWDEFAULTS, myfunc, new_kwdefaults), events) + + # Clear events reference to func + events = [] + del myfunc + self.assertIn((_testcapi.PYFUNC_EVENT_DESTROY, myfunc_id, None), events) + + def test_multiple_watchers(self): + events0 = [] + def first_watcher(*args): + events0.append(args) + + events1 = [] + def second_watcher(*args): + events1.append(args) + + with self.add_watcher(first_watcher): + with self.add_watcher(second_watcher): + def myfunc(): + pass + + event = (_testcapi.PYFUNC_EVENT_CREATE, myfunc, None) + self.assertIn(event, events0) + self.assertIn(event, events1) + + def test_watcher_raises_error(self): + class MyError(Exception): + pass + + def watcher(*args): + raise MyError("testing 123") + + with self.add_watcher(watcher): + with catch_unraisable_exception() as cm: + def myfunc(): + pass + + self.assertIs(cm.unraisable.object, myfunc) + self.assertIsInstance(cm.unraisable.exc_value, MyError) + + def test_clear_out_of_range_watcher_id(self): + with self.assertRaisesRegex(ValueError, r"invalid func watcher ID -1"): + _testcapi.clear_func_watcher(-1) + with self.assertRaisesRegex(ValueError, r"invalid func watcher ID 8"): + _testcapi.clear_func_watcher(8) # FUNC_MAX_WATCHERS = 8 + + def 
test_clear_unassigned_watcher_id(self): + with self.assertRaisesRegex(ValueError, r"no func watcher set for ID 1"): + _testcapi.clear_func_watcher(1) + + def test_allocate_too_many_watchers(self): + with self.assertRaisesRegex(RuntimeError, r"no more func watcher IDs"): + _testcapi.allocate_too_many_func_watchers() + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_check_c_globals.py b/Lib/test/test_check_c_globals.py index 898807a5e69259..670be52422f799 100644 --- a/Lib/test/test_check_c_globals.py +++ b/Lib/test/test_check_c_globals.py @@ -2,6 +2,11 @@ import test.test_tools from test.support.warnings_helper import save_restore_warnings_filters + +# TODO: gh-92584: c-analyzer uses distutils which was removed in Python 3.12 +raise unittest.SkipTest("distutils has been removed in Python 3.12") + + test.test_tools.skip_if_missing('c-analyzer') with test.test_tools.imports_under_tool('c-analyzer'): # gh-95349: Save/restore warnings filters to leave them unchanged. diff --git a/Lib/test/test_clinic.py b/Lib/test/test_clinic.py index 8ab40c694f711d..4abf739cf52ca3 100644 --- a/Lib/test/test_clinic.py +++ b/Lib/test/test_clinic.py @@ -3,7 +3,7 @@ # Licensed to the PSF under a contributor agreement. from test import support, test_tools -from test.support import os_helper +from test.support import import_helper, os_helper from unittest import TestCase import collections import inspect @@ -730,6 +730,15 @@ def test_parameters_not_permitted_after_slash_for_now(self): x: int """) + def test_parameters_no_more_than_one_vararg(self): + s = self.parse_function_should_fail(""" +module foo +foo.bar + *vararg1: object + *vararg2: object +""") + self.assertEqual(s, "Error on line 0:\nToo many var args\n") + def test_function_not_at_column_0(self): function = self.parse_function(""" module foo @@ -820,5 +829,461 @@ def test_external(self): self.assertEqual(new_mtime_ns, old_mtime_ns) +ac_tester = import_helper.import_module('_testclinic') + + +class ClinicFunctionalTest(unittest.TestCase): + locals().update((name, getattr(ac_tester, name)) + for name in dir(ac_tester) if name.startswith('test_')) + + def test_objects_converter(self): + with self.assertRaises(TypeError): + ac_tester.objects_converter() + self.assertEqual(ac_tester.objects_converter(1, 2), (1, 2)) + self.assertEqual(ac_tester.objects_converter([], 'whatever class'), ([], 'whatever class')) + self.assertEqual(ac_tester.objects_converter(1), (1, None)) + + def test_bytes_object_converter(self): + with self.assertRaises(TypeError): + ac_tester.bytes_object_converter(1) + self.assertEqual(ac_tester.bytes_object_converter(b'BytesObject'), (b'BytesObject',)) + + def test_byte_array_object_converter(self): + with self.assertRaises(TypeError): + ac_tester.byte_array_object_converter(1) + byte_arr = bytearray(b'ByteArrayObject') + self.assertEqual(ac_tester.byte_array_object_converter(byte_arr), (byte_arr,)) + + def test_unicode_converter(self): + with self.assertRaises(TypeError): + ac_tester.unicode_converter(1) + self.assertEqual(ac_tester.unicode_converter('unicode'), ('unicode',)) + + def test_bool_converter(self): + with self.assertRaises(TypeError): + ac_tester.bool_converter(False, False, 'not a int') + self.assertEqual(ac_tester.bool_converter(), (True, True, True)) + self.assertEqual(ac_tester.bool_converter('', [], 5), (False, False, True)) + self.assertEqual(ac_tester.bool_converter(('not empty',), {1: 2}, 0), (True, True, False)) + + def test_char_converter(self): + with self.assertRaises(TypeError): + 
ac_tester.char_converter(1) + with self.assertRaises(TypeError): + ac_tester.char_converter(b'ab') + chars = [b'A', b'\a', b'\b', b'\t', b'\n', b'\v', b'\f', b'\r', b'"', b"'", b'?', b'\\', b'\000', b'\377'] + expected = tuple(ord(c) for c in chars) + self.assertEqual(ac_tester.char_converter(), expected) + chars = [b'1', b'2', b'3', b'4', b'5', b'6', b'7', b'8', b'9', b'0', b'a', b'b', b'c', b'd'] + expected = tuple(ord(c) for c in chars) + self.assertEqual(ac_tester.char_converter(*chars), expected) + + def test_unsigned_char_converter(self): + from _testcapi import UCHAR_MAX + with self.assertRaises(OverflowError): + ac_tester.unsigned_char_converter(-1) + with self.assertRaises(OverflowError): + ac_tester.unsigned_char_converter(UCHAR_MAX + 1) + with self.assertRaises(OverflowError): + ac_tester.unsigned_char_converter(0, UCHAR_MAX + 1) + with self.assertRaises(TypeError): + ac_tester.unsigned_char_converter([]) + self.assertEqual(ac_tester.unsigned_char_converter(), (12, 34, 56)) + self.assertEqual(ac_tester.unsigned_char_converter(0, 0, UCHAR_MAX + 1), (0, 0, 0)) + self.assertEqual(ac_tester.unsigned_char_converter(0, 0, (UCHAR_MAX + 1) * 3 + 123), (0, 0, 123)) + + def test_short_converter(self): + from _testcapi import SHRT_MIN, SHRT_MAX + with self.assertRaises(OverflowError): + ac_tester.short_converter(SHRT_MIN - 1) + with self.assertRaises(OverflowError): + ac_tester.short_converter(SHRT_MAX + 1) + with self.assertRaises(TypeError): + ac_tester.short_converter([]) + self.assertEqual(ac_tester.short_converter(-1234), (-1234,)) + self.assertEqual(ac_tester.short_converter(4321), (4321,)) + + def test_unsigned_short_converter(self): + from _testcapi import USHRT_MAX + with self.assertRaises(ValueError): + ac_tester.unsigned_short_converter(-1) + with self.assertRaises(OverflowError): + ac_tester.unsigned_short_converter(USHRT_MAX + 1) + with self.assertRaises(OverflowError): + ac_tester.unsigned_short_converter(0, USHRT_MAX + 1) + with self.assertRaises(TypeError): + ac_tester.unsigned_short_converter([]) + self.assertEqual(ac_tester.unsigned_short_converter(), (12, 34, 56)) + self.assertEqual(ac_tester.unsigned_short_converter(0, 0, USHRT_MAX + 1), (0, 0, 0)) + self.assertEqual(ac_tester.unsigned_short_converter(0, 0, (USHRT_MAX + 1) * 3 + 123), (0, 0, 123)) + + def test_int_converter(self): + from _testcapi import INT_MIN, INT_MAX + with self.assertRaises(OverflowError): + ac_tester.int_converter(INT_MIN - 1) + with self.assertRaises(OverflowError): + ac_tester.int_converter(INT_MAX + 1) + with self.assertRaises(TypeError): + ac_tester.int_converter(1, 2, 3) + with self.assertRaises(TypeError): + ac_tester.int_converter([]) + self.assertEqual(ac_tester.int_converter(), (12, 34, 45)) + self.assertEqual(ac_tester.int_converter(1, 2, '3'), (1, 2, ord('3'))) + + def test_unsigned_int_converter(self): + from _testcapi import UINT_MAX + with self.assertRaises(ValueError): + ac_tester.unsigned_int_converter(-1) + with self.assertRaises(OverflowError): + ac_tester.unsigned_int_converter(UINT_MAX + 1) + with self.assertRaises(OverflowError): + ac_tester.unsigned_int_converter(0, UINT_MAX + 1) + with self.assertRaises(TypeError): + ac_tester.unsigned_int_converter([]) + self.assertEqual(ac_tester.unsigned_int_converter(), (12, 34, 56)) + self.assertEqual(ac_tester.unsigned_int_converter(0, 0, UINT_MAX + 1), (0, 0, 0)) + self.assertEqual(ac_tester.unsigned_int_converter(0, 0, (UINT_MAX + 1) * 3 + 123), (0, 0, 123)) + + def test_long_converter(self): + from _testcapi import LONG_MIN, 
LONG_MAX + with self.assertRaises(OverflowError): + ac_tester.long_converter(LONG_MIN - 1) + with self.assertRaises(OverflowError): + ac_tester.long_converter(LONG_MAX + 1) + with self.assertRaises(TypeError): + ac_tester.long_converter([]) + self.assertEqual(ac_tester.long_converter(), (12,)) + self.assertEqual(ac_tester.long_converter(-1234), (-1234,)) + + def test_unsigned_long_converter(self): + from _testcapi import ULONG_MAX + with self.assertRaises(ValueError): + ac_tester.unsigned_long_converter(-1) + with self.assertRaises(OverflowError): + ac_tester.unsigned_long_converter(ULONG_MAX + 1) + with self.assertRaises(OverflowError): + ac_tester.unsigned_long_converter(0, ULONG_MAX + 1) + with self.assertRaises(TypeError): + ac_tester.unsigned_long_converter([]) + self.assertEqual(ac_tester.unsigned_long_converter(), (12, 34, 56)) + self.assertEqual(ac_tester.unsigned_long_converter(0, 0, ULONG_MAX + 1), (0, 0, 0)) + self.assertEqual(ac_tester.unsigned_long_converter(0, 0, (ULONG_MAX + 1) * 3 + 123), (0, 0, 123)) + + def test_long_long_converter(self): + from _testcapi import LLONG_MIN, LLONG_MAX + with self.assertRaises(OverflowError): + ac_tester.long_long_converter(LLONG_MIN - 1) + with self.assertRaises(OverflowError): + ac_tester.long_long_converter(LLONG_MAX + 1) + with self.assertRaises(TypeError): + ac_tester.long_long_converter([]) + self.assertEqual(ac_tester.long_long_converter(), (12,)) + self.assertEqual(ac_tester.long_long_converter(-1234), (-1234,)) + + def test_unsigned_long_long_converter(self): + from _testcapi import ULLONG_MAX + with self.assertRaises(ValueError): + ac_tester.unsigned_long_long_converter(-1) + with self.assertRaises(OverflowError): + ac_tester.unsigned_long_long_converter(ULLONG_MAX + 1) + with self.assertRaises(OverflowError): + ac_tester.unsigned_long_long_converter(0, ULLONG_MAX + 1) + with self.assertRaises(TypeError): + ac_tester.unsigned_long_long_converter([]) + self.assertEqual(ac_tester.unsigned_long_long_converter(), (12, 34, 56)) + self.assertEqual(ac_tester.unsigned_long_long_converter(0, 0, ULLONG_MAX + 1), (0, 0, 0)) + self.assertEqual(ac_tester.unsigned_long_long_converter(0, 0, (ULLONG_MAX + 1) * 3 + 123), (0, 0, 123)) + + def test_py_ssize_t_converter(self): + from _testcapi import PY_SSIZE_T_MIN, PY_SSIZE_T_MAX + with self.assertRaises(OverflowError): + ac_tester.py_ssize_t_converter(PY_SSIZE_T_MIN - 1) + with self.assertRaises(OverflowError): + ac_tester.py_ssize_t_converter(PY_SSIZE_T_MAX + 1) + with self.assertRaises(TypeError): + ac_tester.py_ssize_t_converter([]) + self.assertEqual(ac_tester.py_ssize_t_converter(), (12, 34, 56)) + self.assertEqual(ac_tester.py_ssize_t_converter(1, 2, None), (1, 2, 56)) + + def test_slice_index_converter(self): + from _testcapi import PY_SSIZE_T_MIN, PY_SSIZE_T_MAX + with self.assertRaises(TypeError): + ac_tester.slice_index_converter([]) + self.assertEqual(ac_tester.slice_index_converter(), (12, 34, 56)) + self.assertEqual(ac_tester.slice_index_converter(1, 2, None), (1, 2, 56)) + self.assertEqual(ac_tester.slice_index_converter(PY_SSIZE_T_MAX, PY_SSIZE_T_MAX + 1, PY_SSIZE_T_MAX + 1234), + (PY_SSIZE_T_MAX, PY_SSIZE_T_MAX, PY_SSIZE_T_MAX)) + self.assertEqual(ac_tester.slice_index_converter(PY_SSIZE_T_MIN, PY_SSIZE_T_MIN - 1, PY_SSIZE_T_MIN - 1234), + (PY_SSIZE_T_MIN, PY_SSIZE_T_MIN, PY_SSIZE_T_MIN)) + + def test_size_t_converter(self): + with self.assertRaises(ValueError): + ac_tester.size_t_converter(-1) + with self.assertRaises(TypeError): + ac_tester.size_t_converter([]) + 
self.assertEqual(ac_tester.size_t_converter(), (12,)) + + def test_float_converter(self): + with self.assertRaises(TypeError): + ac_tester.float_converter([]) + self.assertEqual(ac_tester.float_converter(), (12.5,)) + self.assertEqual(ac_tester.float_converter(-0.5), (-0.5,)) + + def test_double_converter(self): + with self.assertRaises(TypeError): + ac_tester.double_converter([]) + self.assertEqual(ac_tester.double_converter(), (12.5,)) + self.assertEqual(ac_tester.double_converter(-0.5), (-0.5,)) + + def test_py_complex_converter(self): + with self.assertRaises(TypeError): + ac_tester.py_complex_converter([]) + self.assertEqual(ac_tester.py_complex_converter(complex(1, 2)), (complex(1, 2),)) + self.assertEqual(ac_tester.py_complex_converter(complex('-1-2j')), (complex('-1-2j'),)) + self.assertEqual(ac_tester.py_complex_converter(-0.5), (-0.5,)) + self.assertEqual(ac_tester.py_complex_converter(10), (10,)) + + def test_str_converter(self): + with self.assertRaises(TypeError): + ac_tester.str_converter(1) + with self.assertRaises(TypeError): + ac_tester.str_converter('a', 'b', 'c') + with self.assertRaises(ValueError): + ac_tester.str_converter('a', b'b\0b', 'c') + self.assertEqual(ac_tester.str_converter('a', b'b', 'c'), ('a', 'b', 'c')) + self.assertEqual(ac_tester.str_converter('a', b'b', b'c'), ('a', 'b', 'c')) + self.assertEqual(ac_tester.str_converter('a', b'b', 'c\0c'), ('a', 'b', 'c\0c')) + + def test_str_converter_encoding(self): + with self.assertRaises(TypeError): + ac_tester.str_converter_encoding(1) + self.assertEqual(ac_tester.str_converter_encoding('a', 'b', 'c'), ('a', 'b', 'c')) + with self.assertRaises(TypeError): + ac_tester.str_converter_encoding('a', b'b\0b', 'c') + self.assertEqual(ac_tester.str_converter_encoding('a', b'b', bytearray([ord('c')])), ('a', 'b', 'c')) + self.assertEqual(ac_tester.str_converter_encoding('a', b'b', bytearray([ord('c'), 0, ord('c')])), + ('a', 'b', 'c\x00c')) + self.assertEqual(ac_tester.str_converter_encoding('a', b'b', b'c\x00c'), ('a', 'b', 'c\x00c')) + + def test_py_buffer_converter(self): + with self.assertRaises(TypeError): + ac_tester.py_buffer_converter('a', 'b') + self.assertEqual(ac_tester.py_buffer_converter('abc', bytearray([1, 2, 3])), (b'abc', b'\x01\x02\x03')) + + def test_keywords(self): + self.assertEqual(ac_tester.keywords(1, 2), (1, 2)) + self.assertEqual(ac_tester.keywords(1, b=2), (1, 2)) + self.assertEqual(ac_tester.keywords(a=1, b=2), (1, 2)) + + def test_keywords_kwonly(self): + with self.assertRaises(TypeError): + ac_tester.keywords_kwonly(1, 2) + self.assertEqual(ac_tester.keywords_kwonly(1, b=2), (1, 2)) + self.assertEqual(ac_tester.keywords_kwonly(a=1, b=2), (1, 2)) + + def test_keywords_opt(self): + self.assertEqual(ac_tester.keywords_opt(1), (1, None, None)) + self.assertEqual(ac_tester.keywords_opt(1, 2), (1, 2, None)) + self.assertEqual(ac_tester.keywords_opt(1, 2, 3), (1, 2, 3)) + self.assertEqual(ac_tester.keywords_opt(1, b=2), (1, 2, None)) + self.assertEqual(ac_tester.keywords_opt(1, 2, c=3), (1, 2, 3)) + self.assertEqual(ac_tester.keywords_opt(a=1, c=3), (1, None, 3)) + self.assertEqual(ac_tester.keywords_opt(a=1, b=2, c=3), (1, 2, 3)) + + def test_keywords_opt_kwonly(self): + self.assertEqual(ac_tester.keywords_opt_kwonly(1), (1, None, None, None)) + self.assertEqual(ac_tester.keywords_opt_kwonly(1, 2), (1, 2, None, None)) + with self.assertRaises(TypeError): + ac_tester.keywords_opt_kwonly(1, 2, 3) + self.assertEqual(ac_tester.keywords_opt_kwonly(1, b=2), (1, 2, None, None)) + 
self.assertEqual(ac_tester.keywords_opt_kwonly(1, 2, c=3), (1, 2, 3, None)) + self.assertEqual(ac_tester.keywords_opt_kwonly(a=1, c=3), (1, None, 3, None)) + self.assertEqual(ac_tester.keywords_opt_kwonly(a=1, b=2, c=3, d=4), (1, 2, 3, 4)) + + def test_keywords_kwonly_opt(self): + self.assertEqual(ac_tester.keywords_kwonly_opt(1), (1, None, None)) + with self.assertRaises(TypeError): + ac_tester.keywords_kwonly_opt(1, 2) + self.assertEqual(ac_tester.keywords_kwonly_opt(1, b=2), (1, 2, None)) + self.assertEqual(ac_tester.keywords_kwonly_opt(a=1, c=3), (1, None, 3)) + self.assertEqual(ac_tester.keywords_kwonly_opt(a=1, b=2, c=3), (1, 2, 3)) + + def test_posonly_keywords(self): + with self.assertRaises(TypeError): + ac_tester.posonly_keywords(1) + with self.assertRaises(TypeError): + ac_tester.posonly_keywords(a=1, b=2) + self.assertEqual(ac_tester.posonly_keywords(1, 2), (1, 2)) + self.assertEqual(ac_tester.posonly_keywords(1, b=2), (1, 2)) + + def test_posonly_kwonly(self): + with self.assertRaises(TypeError): + ac_tester.posonly_kwonly(1) + with self.assertRaises(TypeError): + ac_tester.posonly_kwonly(1, 2) + with self.assertRaises(TypeError): + ac_tester.posonly_kwonly(a=1, b=2) + self.assertEqual(ac_tester.posonly_kwonly(1, b=2), (1, 2)) + + def test_posonly_keywords_kwonly(self): + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_kwonly(1) + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_kwonly(1, 2, 3) + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_kwonly(a=1, b=2, c=3) + self.assertEqual(ac_tester.posonly_keywords_kwonly(1, 2, c=3), (1, 2, 3)) + self.assertEqual(ac_tester.posonly_keywords_kwonly(1, b=2, c=3), (1, 2, 3)) + + def test_posonly_keywords_opt(self): + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_opt(1) + self.assertEqual(ac_tester.posonly_keywords_opt(1, 2), (1, 2, None, None)) + self.assertEqual(ac_tester.posonly_keywords_opt(1, 2, 3), (1, 2, 3, None)) + self.assertEqual(ac_tester.posonly_keywords_opt(1, 2, 3, 4), (1, 2, 3, 4)) + self.assertEqual(ac_tester.posonly_keywords_opt(1, b=2), (1, 2, None, None)) + self.assertEqual(ac_tester.posonly_keywords_opt(1, 2, c=3), (1, 2, 3, None)) + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_opt(a=1, b=2, c=3, d=4) + self.assertEqual(ac_tester.posonly_keywords_opt(1, b=2, c=3, d=4), (1, 2, 3, 4)) + + def test_posonly_opt_keywords_opt(self): + self.assertEqual(ac_tester.posonly_opt_keywords_opt(1), (1, None, None, None)) + self.assertEqual(ac_tester.posonly_opt_keywords_opt(1, 2), (1, 2, None, None)) + self.assertEqual(ac_tester.posonly_opt_keywords_opt(1, 2, 3), (1, 2, 3, None)) + self.assertEqual(ac_tester.posonly_opt_keywords_opt(1, 2, 3, 4), (1, 2, 3, 4)) + with self.assertRaises(TypeError): + ac_tester.posonly_opt_keywords_opt(1, b=2) + self.assertEqual(ac_tester.posonly_opt_keywords_opt(1, 2, c=3), (1, 2, 3, None)) + self.assertEqual(ac_tester.posonly_opt_keywords_opt(1, 2, c=3, d=4), (1, 2, 3, 4)) + with self.assertRaises(TypeError): + ac_tester.posonly_opt_keywords_opt(a=1, b=2, c=3, d=4) + + def test_posonly_kwonly_opt(self): + with self.assertRaises(TypeError): + ac_tester.posonly_kwonly_opt(1) + with self.assertRaises(TypeError): + ac_tester.posonly_kwonly_opt(1, 2) + self.assertEqual(ac_tester.posonly_kwonly_opt(1, b=2), (1, 2, None, None)) + self.assertEqual(ac_tester.posonly_kwonly_opt(1, b=2, c=3), (1, 2, 3, None)) + self.assertEqual(ac_tester.posonly_kwonly_opt(1, b=2, c=3, d=4), (1, 2, 3, 4)) + with self.assertRaises(TypeError): + 
ac_tester.posonly_kwonly_opt(a=1, b=2, c=3, d=4) + + def test_posonly_opt_kwonly_opt(self): + self.assertEqual(ac_tester.posonly_opt_kwonly_opt(1), (1, None, None, None)) + self.assertEqual(ac_tester.posonly_opt_kwonly_opt(1, 2), (1, 2, None, None)) + with self.assertRaises(TypeError): + ac_tester.posonly_opt_kwonly_opt(1, 2, 3) + with self.assertRaises(TypeError): + ac_tester.posonly_opt_kwonly_opt(1, b=2) + self.assertEqual(ac_tester.posonly_opt_kwonly_opt(1, 2, c=3), (1, 2, 3, None)) + self.assertEqual(ac_tester.posonly_opt_kwonly_opt(1, 2, c=3, d=4), (1, 2, 3, 4)) + + def test_posonly_keywords_kwonly_opt(self): + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_kwonly_opt(1) + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_kwonly_opt(1, 2) + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_kwonly_opt(1, b=2) + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_kwonly_opt(1, 2, 3) + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_kwonly_opt(a=1, b=2, c=3) + self.assertEqual(ac_tester.posonly_keywords_kwonly_opt(1, 2, c=3), (1, 2, 3, None, None)) + self.assertEqual(ac_tester.posonly_keywords_kwonly_opt(1, b=2, c=3), (1, 2, 3, None, None)) + self.assertEqual(ac_tester.posonly_keywords_kwonly_opt(1, 2, c=3, d=4), (1, 2, 3, 4, None)) + self.assertEqual(ac_tester.posonly_keywords_kwonly_opt(1, 2, c=3, d=4, e=5), (1, 2, 3, 4, 5)) + + def test_posonly_keywords_opt_kwonly_opt(self): + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_opt_kwonly_opt(1) + self.assertEqual(ac_tester.posonly_keywords_opt_kwonly_opt(1, 2), (1, 2, None, None, None)) + self.assertEqual(ac_tester.posonly_keywords_opt_kwonly_opt(1, b=2), (1, 2, None, None, None)) + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_opt_kwonly_opt(1, 2, 3, 4) + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_opt_kwonly_opt(a=1, b=2) + self.assertEqual(ac_tester.posonly_keywords_opt_kwonly_opt(1, 2, c=3), (1, 2, 3, None, None)) + self.assertEqual(ac_tester.posonly_keywords_opt_kwonly_opt(1, b=2, c=3), (1, 2, 3, None, None)) + self.assertEqual(ac_tester.posonly_keywords_opt_kwonly_opt(1, 2, 3, d=4), (1, 2, 3, 4, None)) + self.assertEqual(ac_tester.posonly_keywords_opt_kwonly_opt(1, 2, c=3, d=4), (1, 2, 3, 4, None)) + self.assertEqual(ac_tester.posonly_keywords_opt_kwonly_opt(1, 2, 3, d=4, e=5), (1, 2, 3, 4, 5)) + self.assertEqual(ac_tester.posonly_keywords_opt_kwonly_opt(1, 2, c=3, d=4, e=5), (1, 2, 3, 4, 5)) + + def test_posonly_opt_keywords_opt_kwonly_opt(self): + self.assertEqual(ac_tester.posonly_opt_keywords_opt_kwonly_opt(1), (1, None, None, None)) + self.assertEqual(ac_tester.posonly_opt_keywords_opt_kwonly_opt(1, 2), (1, 2, None, None)) + with self.assertRaises(TypeError): + ac_tester.posonly_opt_keywords_opt_kwonly_opt(1, b=2) + self.assertEqual(ac_tester.posonly_opt_keywords_opt_kwonly_opt(1, 2, 3), (1, 2, 3, None)) + self.assertEqual(ac_tester.posonly_opt_keywords_opt_kwonly_opt(1, 2, c=3), (1, 2, 3, None)) + self.assertEqual(ac_tester.posonly_opt_keywords_opt_kwonly_opt(1, 2, 3, d=4), (1, 2, 3, 4)) + self.assertEqual(ac_tester.posonly_opt_keywords_opt_kwonly_opt(1, 2, c=3, d=4), (1, 2, 3, 4)) + with self.assertRaises(TypeError): + ac_tester.posonly_opt_keywords_opt_kwonly_opt(1, 2, 3, 4) + + def test_keyword_only_parameter(self): + with self.assertRaises(TypeError): + ac_tester.keyword_only_parameter() + with self.assertRaises(TypeError): + ac_tester.keyword_only_parameter(1) + 
self.assertEqual(ac_tester.keyword_only_parameter(a=1), (1,)) + + def test_posonly_vararg(self): + with self.assertRaises(TypeError): + ac_tester.posonly_vararg() + self.assertEqual(ac_tester.posonly_vararg(1, 2), (1, 2, ())) + self.assertEqual(ac_tester.posonly_vararg(1, b=2), (1, 2, ())) + self.assertEqual(ac_tester.posonly_vararg(1, 2, 3, 4), (1, 2, (3, 4))) + + def test_vararg_and_posonly(self): + with self.assertRaises(TypeError): + ac_tester.vararg_and_posonly() + with self.assertRaises(TypeError): + ac_tester.vararg_and_posonly(1, b=2) + self.assertEqual(ac_tester.vararg_and_posonly(1, 2, 3, 4), (1, (2, 3, 4))) + + def test_vararg(self): + with self.assertRaises(TypeError): + ac_tester.vararg() + with self.assertRaises(TypeError): + ac_tester.vararg(1, b=2) + self.assertEqual(ac_tester.vararg(1, 2, 3, 4), (1, (2, 3, 4))) + + def test_vararg_with_default(self): + with self.assertRaises(TypeError): + ac_tester.vararg_with_default() + self.assertEqual(ac_tester.vararg_with_default(1, b=False), (1, (), False)) + self.assertEqual(ac_tester.vararg_with_default(1, 2, 3, 4), (1, (2, 3, 4), False)) + self.assertEqual(ac_tester.vararg_with_default(1, 2, 3, 4, b=True), (1, (2, 3, 4), True)) + + def test_vararg_with_only_defaults(self): + self.assertEqual(ac_tester.vararg_with_only_defaults(), ((), None)) + self.assertEqual(ac_tester.vararg_with_only_defaults(b=2), ((), 2)) + self.assertEqual(ac_tester.vararg_with_only_defaults(1, b=2), ((1, ), 2)) + self.assertEqual(ac_tester.vararg_with_only_defaults(1, 2, 3, 4), ((1, 2, 3, 4), None)) + self.assertEqual(ac_tester.vararg_with_only_defaults(1, 2, 3, 4, b=5), ((1, 2, 3, 4), 5)) + + def test_gh_32092_oob(self): + ac_tester.gh_32092_oob(1, 2, 3, 4, kw1=5, kw2=6) + + def test_gh_32092_kw_pass(self): + ac_tester.gh_32092_kw_pass(1, 2, 3) + + def test_gh_99233_refcount(self): + arg = '*A unique string is not referenced by anywhere else.*' + arg_refcount_origin = sys.getrefcount(arg) + ac_tester.gh_99233_refcount(arg) + arg_refcount_after = sys.getrefcount(arg) + self.assertEqual(arg_refcount_origin, arg_refcount_after) + + def test_gh_99240_double_free(self): + expected_error = r'gh_99240_double_free\(\) argument 2 must be encoded string without null bytes, not str' + with self.assertRaisesRegex(TypeError, expected_error): + ac_tester.gh_99240_double_free('a', '\0b') + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py index 1ee3acd7076c9d..f10d72ea5547ee 100644 --- a/Lib/test/test_cmd_line_script.py +++ b/Lib/test/test_cmd_line_script.py @@ -752,6 +752,23 @@ def test_nonexisting_script(self): self.assertIn(": can't open file ", err) self.assertNotEqual(proc.returncode, 0) + @unittest.skipUnless(os.path.exists('/dev/fd/0'), 'requires /dev/fd platform') + @unittest.skipIf(sys.platform.startswith("freebsd") and + os.stat("/dev").st_dev == os.stat("/dev/fd").st_dev, + "Requires fdescfs mounted on /dev/fd on FreeBSD") + def test_script_as_dev_fd(self): + # GH-87235: On macOS passing a non-trivial script to /dev/fd/N can cause + # problems because all open /dev/fd/N file descriptors share the same + # offset. 
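+        # Illustrative note (editor's addition, not part of the upstream change):
+        # since /dev/fd/N shares its file offset with the descriptor it mirrors,
+        # the assertion below checks that the child prints the complete script
+        # output, i.e. that none of the script was consumed before execution.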
+ script = 'print("12345678912345678912345")' + with os_helper.temp_dir() as work_dir: + script_name = _make_test_script(work_dir, 'script.py', script) + with open(script_name, "r") as fp: + p = spawn_python(f"/dev/fd/{fp.fileno()}", close_fds=False, pass_fds=(0,1,2,fp.fileno())) + out, err = p.communicate() + self.assertEqual(out, b"12345678912345678912345\n") + + def tearDownModule(): support.reap_children() diff --git a/Lib/test/test_code.py b/Lib/test/test_code.py index 4e4d82314a9fb8..a64301f6974e6e 100644 --- a/Lib/test/test_code.py +++ b/Lib/test/test_code.py @@ -143,7 +143,7 @@ gc_collect) from test.support.script_helper import assert_python_ok from test.support import threading_helper -from opcode import opmap +from opcode import opmap, opname COPY_FREE_VARS = opmap['COPY_FREE_VARS'] @@ -192,7 +192,7 @@ def create_closure(__class__): def new_code(c): '''A new code object with a __class__ cell added to freevars''' - return c.replace(co_freevars=c.co_freevars + ('__class__',), co_code=bytes([COPY_FREE_VARS, 1])+c.co_code) + return c.replace(co_freevars=c.co_freevars + ('__class__',), co_code=bytes([COPY_FREE_VARS, 1, 0, 0])+c.co_code) def add_foreign_method(cls, name, f): code = new_code(f.__code__) @@ -339,15 +339,19 @@ def func(): self.assertEqual(list(new_code.co_lines()), []) def test_invalid_bytecode(self): - def foo(): pass - foo.__code__ = co = foo.__code__.replace(co_code=b'\xee\x00d\x00S\x00') + def foo(): + pass - with self.assertRaises(SystemError) as se: - foo() - self.assertEqual( - f"{co.co_filename}:{co.co_firstlineno}: unknown opcode 238", - str(se.exception)) + # assert that opcode 238 is invalid + self.assertEqual(opname[238], '<238>') + # change first opcode to 0xee (=238) + foo.__code__ = foo.__code__.replace( + co_code=b'\xee' + foo.__code__.co_code[1:]) + + msg = f"unknown opcode 238" + with self.assertRaisesRegex(SystemError, msg): + foo() @requires_debug_ranges() def test_co_positions_artificial_instructions(self): @@ -368,7 +372,7 @@ def test_co_positions_artificial_instructions(self): artificial_instructions = [] for instr, positions in zip( dis.get_instructions(code, show_caches=True), - code.co_positions(), + dis._get_co_positions(code, show_caches=True), strict=True ): # If any of the positions is None, then all have to @@ -695,9 +699,9 @@ def f(): co_firstlineno=42, co_code=bytes( [ - dis.opmap["RESUME"], 0, - dis.opmap["LOAD_ASSERTION_ERROR"], 0, - dis.opmap["RAISE_VARARGS"], 1, + dis.opmap["RESUME"], 0, 0, 0, + dis.opmap["LOAD_ASSERTION_ERROR"], 0, 0, 0, + dis.opmap["RAISE_VARARGS"], 1, 0, 0, ] ), co_linetable=bytes( diff --git a/Lib/test/test_codecs.py b/Lib/test/test_codecs.py index 32a704f4e97e41..e3add0c1ee926c 100644 --- a/Lib/test/test_codecs.py +++ b/Lib/test/test_codecs.py @@ -1552,6 +1552,12 @@ def test_builtin_encode(self): self.assertEqual("pyth\xf6n.org".encode("idna"), b"xn--pythn-mua.org") self.assertEqual("pyth\xf6n.org.".encode("idna"), b"xn--pythn-mua.org.") + def test_builtin_decode_length_limit(self): + with self.assertRaisesRegex(UnicodeError, "way too long"): + (b"xn--016c"+b"a"*1100).decode("idna") + with self.assertRaisesRegex(UnicodeError, "too long"): + (b"xn--016c"+b"a"*70).decode("idna") + def test_stream(self): r = codecs.getreader("idna")(io.BytesIO(b"abc")) r.read(3) diff --git a/Lib/test/test_codeop.py b/Lib/test/test_codeop.py index 133096d25a44bc..d7b51be642e46f 100644 --- a/Lib/test/test_codeop.py +++ b/Lib/test/test_codeop.py @@ -310,8 +310,8 @@ def test_filename(self): def test_warning(self): # Test that the warning 
is only returned once. with warnings_helper.check_warnings( - (".*literal", SyntaxWarning), - (".*invalid", DeprecationWarning), + ('"is" with a literal', SyntaxWarning), + ("invalid escape sequence", SyntaxWarning), ) as w: compile_command(r"'\e' is 0") self.assertEqual(len(w.warnings), 2) @@ -321,9 +321,9 @@ def test_warning(self): warnings.simplefilter('error', SyntaxWarning) compile_command('1 is 1', symbol='exec') - # Check DeprecationWarning treated as an SyntaxError + # Check SyntaxWarning treated as an SyntaxError with warnings.catch_warnings(), self.assertRaises(SyntaxError): - warnings.simplefilter('error', DeprecationWarning) + warnings.simplefilter('error', SyntaxWarning) compile_command(r"'\e'", symbol='exec') def test_incomplete_warning(self): @@ -337,7 +337,7 @@ def test_invalid_warning(self): warnings.simplefilter('always') self.assertInvalid("'\\e' 1") self.assertEqual(len(w), 1) - self.assertEqual(w[0].category, DeprecationWarning) + self.assertEqual(w[0].category, SyntaxWarning) self.assertRegex(str(w[0].message), 'invalid escape sequence') self.assertEqual(w[0].filename, '<input>') diff --git a/Lib/test/test_collections.py b/Lib/test/test_collections.py index 1e398d6c3c7a3f..bfe18c7fc50330 100644 --- a/Lib/test/test_collections.py +++ b/Lib/test/test_collections.py @@ -810,6 +810,8 @@ def throw(self, typ, val=None, tb=None): def __await__(self): yield + self.validate_abstract_methods(Awaitable, '__await__') + non_samples = [None, int(), gen(), object()] for x in non_samples: self.assertNotIsInstance(x, Awaitable) @@ -860,6 +862,8 @@ def throw(self, typ, val=None, tb=None): def __await__(self): yield + self.validate_abstract_methods(Coroutine, '__await__', 'send', 'throw') + non_samples = [None, int(), gen(), object(), Bar()] for x in non_samples: self.assertNotIsInstance(x, Coroutine) @@ -1602,6 +1606,7 @@ def __len__(self): containers = [ seq, ItemsView({1: nan, 2: obj}), + KeysView({1: nan, 2: obj}), ValuesView({1: nan, 2: obj}) ] for container in containers: @@ -1865,6 +1870,8 @@ def test_MutableMapping_subclass(self): mymap['red'] = 5 self.assertIsInstance(mymap.keys(), Set) self.assertIsInstance(mymap.keys(), KeysView) + self.assertIsInstance(mymap.values(), Collection) + self.assertIsInstance(mymap.values(), ValuesView) self.assertIsInstance(mymap.items(), Set) self.assertIsInstance(mymap.items(), ItemsView) @@ -1940,6 +1947,7 @@ def test_ByteString(self): self.assertFalse(issubclass(sample, ByteString)) self.assertNotIsInstance(memoryview(b""), ByteString) self.assertFalse(issubclass(memoryview, ByteString)) + self.validate_abstract_methods(ByteString, '__getitem__', '__len__') def test_MutableSequence(self): for sample in [tuple, str, bytes]: diff --git a/Lib/test/test_compile.py b/Lib/test/test_compile.py index 27f91dbcd63aa5..3d09b962d602b3 100644 --- a/Lib/test/test_compile.py +++ b/Lib/test/test_compile.py @@ -670,7 +670,7 @@ def test_merge_code_attrs(self): self.assertIs(f1.__code__.co_linetable, f2.__code__.co_linetable) @support.cpython_only - def test_strip_unused_consts(self): + def test_remove_unused_consts(self): def f(): "docstring" if True: @@ -679,7 +679,41 @@ def f(): return "unused" self.assertEqual(f.__code__.co_consts, - ("docstring", True, "used")) + ("docstring", "used")) + + @support.cpython_only + def test_remove_unused_consts_no_docstring(self): + # the first item (None for no docstring in this case) is + # always retained. 
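+        # Illustrative sketch (editor's addition, not part of the upstream change):
+        #     def g():
+        #         return 1
+        #     g.__code__.co_consts[0] is None   # slot 0 is reserved for the docstring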
+ def f(): + if True: + return "used" + else: + return "unused" + + self.assertEqual(f.__code__.co_consts, + (None, "used")) + + @support.cpython_only + def test_remove_unused_consts_extended_args(self): + N = 1000 + code = ["def f():\n"] + code.append("\ts = ''\n") + code.append("\tfor i in range(1):\n") + for i in range(N): + code.append(f"\t\tif True: s += 't{i}'\n") + code.append(f"\t\tif False: s += 'f{i}'\n") + code.append("\treturn s\n") + + code = "".join(code) + g = {} + eval(compile(code, "file.py", "exec"), g) + exec(code, g) + f = g['f'] + expected = tuple([None, '', 1] + [f't{i}' for i in range(N)]) + self.assertEqual(f.__code__.co_consts, expected) + expected = "".join(expected[3:]) + self.assertEqual(expected, f()) # Stripping unused constants is not a strict requirement for the # Python semantics, it's a more an implementation detail. @@ -1112,6 +1146,17 @@ def test_compare_positions(self): with self.subTest(source): self.assertEqual(actual_positions, expected_positions) + def test_if_expression_expression_empty_block(self): + # See regression in gh-99708 + exprs = [ + "assert (False if 1 else True)", + "def f():\n\tif not (False if 1 else True): raise AssertionError", + "def f():\n\tif not (False if 1 else True): return 12", + ] + for expr in exprs: + with self.subTest(expr=expr): + compile(expr, "<single>", "exec") + @requires_debug_ranges() class TestSourcePositions(unittest.TestCase): @@ -1130,7 +1175,7 @@ def check_positions_against_ast(self, snippet): class SourceOffsetVisitor(ast.NodeVisitor): def generic_visit(self, node): super().generic_visit(node) - if not isinstance(node, ast.expr) and not isinstance(node, ast.stmt): + if not isinstance(node, (ast.expr, ast.stmt, ast.pattern)): return lines.add(node.lineno) end_lines.add(node.end_lineno) @@ -1160,7 +1205,9 @@ def assertOpcodeSourcePositionIs(self, code, opcode, line, end_line, column, end_column, occurrence=1): for instr, position in zip( - dis.Bytecode(code, show_caches=True), code.co_positions(), strict=True + dis.Bytecode(code, show_caches=True), + dis._get_co_positions(code, show_caches=True), + strict=True ): if instr.opname == opcode: occurrence -= 1 @@ -1408,6 +1455,100 @@ async def f(): self.assertOpcodeSourcePositionIs(compiled_code, 'RETURN_VALUE', line=2, end_line=7, column=4, end_column=36, occurrence=1) + def test_matchcase_sequence(self): + snippet = """\ +match x: + case a, b: + pass +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + self.assertOpcodeSourcePositionIs(compiled_code, 'MATCH_SEQUENCE', + line=2, end_line=2, column=9, end_column=13, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'UNPACK_SEQUENCE', + line=2, end_line=2, column=9, end_column=13, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=13, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=13, occurrence=2) + + def test_matchcase_sequence_wildcard(self): + snippet = """\ +match x: + case a, *b, c: + pass +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + self.assertOpcodeSourcePositionIs(compiled_code, 'MATCH_SEQUENCE', + line=2, end_line=2, column=9, end_column=17, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'UNPACK_EX', + line=2, end_line=2, column=9, end_column=17, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=17, occurrence=1) + 
self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=17, occurrence=2) + self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=17, occurrence=3) + + def test_matchcase_mapping(self): + snippet = """\ +match x: + case {"a" : a, "b": b}: + pass +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + self.assertOpcodeSourcePositionIs(compiled_code, 'MATCH_MAPPING', + line=2, end_line=2, column=9, end_column=26, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'MATCH_KEYS', + line=2, end_line=2, column=9, end_column=26, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=26, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=26, occurrence=2) + + def test_matchcase_mapping_wildcard(self): + snippet = """\ +match x: + case {"a" : a, "b": b, **c}: + pass +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + self.assertOpcodeSourcePositionIs(compiled_code, 'MATCH_MAPPING', + line=2, end_line=2, column=9, end_column=31, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'MATCH_KEYS', + line=2, end_line=2, column=9, end_column=31, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=31, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=31, occurrence=2) + + def test_matchcase_class(self): + snippet = """\ +match x: + case C(a, b): + pass +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + self.assertOpcodeSourcePositionIs(compiled_code, 'MATCH_CLASS', + line=2, end_line=2, column=9, end_column=16, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'UNPACK_SEQUENCE', + line=2, end_line=2, column=9, end_column=16, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=16, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=16, occurrence=2) + + def test_matchcase_or(self): + snippet = """\ +match x: + case C(1) | C(2): + pass +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + self.assertOpcodeSourcePositionIs(compiled_code, 'MATCH_CLASS', + line=2, end_line=2, column=9, end_column=13, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'MATCH_CLASS', + line=2, end_line=2, column=16, end_column=20, occurrence=2) + def test_very_long_line_end_offset(self): # Make sure we get the correct column offset for offsets # too large to store in a byte. 
@@ -1645,6 +1786,13 @@ def test_stack_3050(self): # This raised on 3.10.0 to 3.10.5 compile(code, "<foo>", "single") + def test_stack_3050_2(self): + M = 3050 + args = ", ".join(f"arg{i}:type{i}" for i in range(M)) + code = f"def f({args}):\n pass" + # This raised on 3.10.0 to 3.10.5 + compile(code, "<foo>", "single") + class TestStackSizeStability(unittest.TestCase): # Check that repeating certain snippets doesn't increase the stack size diff --git a/Lib/test/test_compiler_codegen.py b/Lib/test/test_compiler_codegen.py new file mode 100644 index 00000000000000..f2e14c1e628c01 --- /dev/null +++ b/Lib/test/test_compiler_codegen.py @@ -0,0 +1,50 @@ + +from test.support.bytecode_helper import CodegenTestCase + +# Tests for the code-generation stage of the compiler. +# Examine the un-optimized code generated from the AST. + +class IsolatedCodeGenTests(CodegenTestCase): + + def codegen_test(self, snippet, expected_insts): + import ast + a = ast.parse(snippet, "my_file.py", "exec"); + insts = self.generate_code(a) + self.assertInstructionsMatch(insts, expected_insts) + + def test_if_expression(self): + snippet = "42 if True else 24" + false_lbl = self.Label() + expected = [ + ('RESUME', 0, 0), + ('LOAD_CONST', 0, 1), + ('POP_JUMP_IF_FALSE', false_lbl := self.Label(), 1), + ('LOAD_CONST', 1, 1), + ('JUMP', exit_lbl := self.Label()), + false_lbl, + ('LOAD_CONST', 2, 1), + exit_lbl, + ('POP_TOP', None), + ] + self.codegen_test(snippet, expected) + + def test_for_loop(self): + snippet = "for x in l:\n\tprint(x)" + false_lbl = self.Label() + expected = [ + ('RESUME', 0, 0), + ('LOAD_NAME', 0, 1), + ('GET_ITER', None, 1), + loop_lbl := self.Label(), + ('FOR_ITER', exit_lbl := self.Label(), 1), + ('STORE_NAME', None, 1), + ('PUSH_NULL', None, 2), + ('LOAD_NAME', None, 2), + ('LOAD_NAME', None, 2), + ('CALL', None, 2), + ('POP_TOP', None), + ('JUMP', loop_lbl), + exit_lbl, + ('END_FOR', None), + ] + self.codegen_test(snippet, expected) diff --git a/Lib/test/test_complex.py b/Lib/test/test_complex.py index e046577b935e92..51ba151505fb54 100644 --- a/Lib/test/test_complex.py +++ b/Lib/test/test_complex.py @@ -306,15 +306,10 @@ def test_conjugate(self): self.assertClose(complex(5.3, 9.8).conjugate(), 5.3-9.8j) def test_constructor(self): - class OS: + class NS: def __init__(self, value): self.value = value def __complex__(self): return self.value - class NS(object): - def __init__(self, value): self.value = value - def __complex__(self): return self.value - self.assertEqual(complex(OS(1+10j)), 1+10j) self.assertEqual(complex(NS(1+10j)), 1+10j) - self.assertRaises(TypeError, complex, OS(None)) self.assertRaises(TypeError, complex, NS(None)) self.assertRaises(TypeError, complex, {}) self.assertRaises(TypeError, complex, NS(1.5)) diff --git a/Lib/test/test_copy.py b/Lib/test/test_copy.py index f4d91c16868986..cc95a319d35d78 100644 --- a/Lib/test/test_copy.py +++ b/Lib/test/test_copy.py @@ -88,9 +88,7 @@ def __getattribute__(self, name): # Type-specific _copy_xxx() methods def test_copy_atomic(self): - class Classic: - pass - class NewStyle(object): + class NewStyle: pass def f(): pass @@ -100,7 +98,7 @@ class WithMetaclass(metaclass=abc.ABCMeta): 42, 2**100, 3.14, True, False, 1j, "hello", "hello\u1234", f.__code__, b"world", bytes(range(256)), range(10), slice(1, 10, 2), - NewStyle, Classic, max, WithMetaclass, property()] + NewStyle, max, WithMetaclass, property()] for x in tests: self.assertIs(copy.copy(x), x) @@ -350,15 +348,13 @@ def __getattribute__(self, name): # Type-specific _deepcopy_xxx() methods 
def test_deepcopy_atomic(self): - class Classic: - pass - class NewStyle(object): + class NewStyle: pass def f(): pass tests = [None, 42, 2**100, 3.14, True, False, 1j, "hello", "hello\u1234", f.__code__, - NewStyle, range(10), Classic, max, property()] + NewStyle, range(10), max, property()] for x in tests: self.assertIs(copy.deepcopy(x), x) diff --git a/Lib/test/test_coroutines.py b/Lib/test/test_coroutines.py index f91c9cc47741b5..43a3ff0536fe28 100644 --- a/Lib/test/test_coroutines.py +++ b/Lib/test/test_coroutines.py @@ -2418,7 +2418,8 @@ class UnawaitedWarningDuringShutdownTest(unittest.TestCase): def test_unawaited_warning_during_shutdown(self): code = ("import asyncio\n" "async def f(): pass\n" - "asyncio.gather(f())\n") + "async def t(): asyncio.gather(f())\n" + "asyncio.run(t())\n") assert_python_ok("-c", code) code = ("import sys\n" diff --git a/Lib/test/test_csv.py b/Lib/test/test_csv.py index d64bff13a44e87..8289ddb1c3a54f 100644 --- a/Lib/test/test_csv.py +++ b/Lib/test/test_csv.py @@ -762,6 +762,10 @@ def test_write_field_not_in_field_names_raise(self): dictrow = {'f0': 0, 'f1': 1, 'f2': 2, 'f3': 3} self.assertRaises(ValueError, csv.DictWriter.writerow, writer, dictrow) + # see bpo-44512 (differently cased 'raise' should not result in 'ignore') + writer = csv.DictWriter(fileobj, ['f1', 'f2'], extrasaction="RAISE") + self.assertRaises(ValueError, csv.DictWriter.writerow, writer, dictrow) + def test_write_field_not_in_field_names_ignore(self): fileobj = StringIO() writer = csv.DictWriter(fileobj, ['f1', 'f2'], extrasaction="ignore") @@ -769,6 +773,10 @@ def test_write_field_not_in_field_names_ignore(self): csv.DictWriter.writerow(writer, dictrow) self.assertEqual(fileobj.getvalue(), "1,2\r\n") + # bpo-44512 + writer = csv.DictWriter(fileobj, ['f1', 'f2'], extrasaction="IGNORE") + csv.DictWriter.writerow(writer, dictrow) + def test_dict_reader_fieldnames_accepts_iter(self): fieldnames = ["a", "b", "c"] f = StringIO() diff --git a/Lib/test/test_ctypes/test_struct_fields.py b/Lib/test/test_ctypes/test_struct_fields.py index ee8415f3e630c2..fefeaea1496a3e 100644 --- a/Lib/test/test_ctypes/test_struct_fields.py +++ b/Lib/test/test_ctypes/test_struct_fields.py @@ -54,6 +54,15 @@ class X(Structure): x.char = b'a\0b\0' self.assertEqual(bytes(x), b'a\x00###') + def test_gh99275(self): + class BrokenStructure(Structure): + def __init_subclass__(cls, **kwargs): + cls._fields_ = [] # This line will fail, `stgdict` is not ready + + with self.assertRaisesRegex(TypeError, + 'ctypes state is not initialized'): + class Subclass(BrokenStructure): ... + # __set__ and __get__ should raise a TypeError in case their self # argument is not a ctype instance. 
def test___set__(self): diff --git a/Lib/test/test_ctypes/test_structures.py b/Lib/test/test_ctypes/test_structures.py index 13c0470ba2238c..df39dc7f50d3f7 100644 --- a/Lib/test/test_ctypes/test_structures.py +++ b/Lib/test/test_ctypes/test_structures.py @@ -332,13 +332,13 @@ class Person(Structure): cls, msg = self.get_except(Person, b"Someone", (1, 2)) self.assertEqual(cls, RuntimeError) self.assertEqual(msg, - "(Phone) <class 'TypeError'>: " + "(Phone) TypeError: " "expected bytes, int found") cls, msg = self.get_except(Person, b"Someone", (b"a", b"b", b"c")) self.assertEqual(cls, RuntimeError) self.assertEqual(msg, - "(Phone) <class 'TypeError'>: too many initializers") + "(Phone) TypeError: too many initializers") def test_huge_field_name(self): # issue12881: segfault with large structure field names diff --git a/Lib/test/test_dataclasses.py b/Lib/test/test_dataclasses.py index 01e66840085bff..a09f36c3dac1ce 100644 --- a/Lib/test/test_dataclasses.py +++ b/Lib/test/test_dataclasses.py @@ -3970,7 +3970,7 @@ class Date(A): day: 'int' self.assertTrue(inspect.isabstract(Date)) - msg = 'class Date without an implementation for abstract method foo' + msg = "class Date without an implementation for abstract method 'foo'" self.assertRaisesRegex(TypeError, msg, Date) diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py index 40cf81ff0b33f5..cbc020d1d3904a 100644 --- a/Lib/test/test_descr.py +++ b/Lib/test/test_descr.py @@ -3261,12 +3261,8 @@ def __get__(self, object, otype): if otype: otype = otype.__name__ return 'object=%s; type=%s' % (object, otype) - class OldClass: + class NewClass: __doc__ = DocDescr() - class NewClass(object): - __doc__ = DocDescr() - self.assertEqual(OldClass.__doc__, 'object=None; type=OldClass') - self.assertEqual(OldClass().__doc__, 'object=OldClass instance; type=OldClass') self.assertEqual(NewClass.__doc__, 'object=None; type=NewClass') self.assertEqual(NewClass().__doc__, 'object=NewClass instance; type=NewClass') diff --git a/Lib/test/test_dictviews.py b/Lib/test/test_dictviews.py index 7c48d800cd88bd..924f4a6829e19c 100644 --- a/Lib/test/test_dictviews.py +++ b/Lib/test/test_dictviews.py @@ -338,6 +338,9 @@ def test_abc_registry(self): self.assertIsInstance(d.values(), collections.abc.ValuesView) self.assertIsInstance(d.values(), collections.abc.MappingView) self.assertIsInstance(d.values(), collections.abc.Sized) + self.assertIsInstance(d.values(), collections.abc.Collection) + self.assertIsInstance(d.values(), collections.abc.Iterable) + self.assertIsInstance(d.values(), collections.abc.Container) self.assertIsInstance(d.items(), collections.abc.ItemsView) self.assertIsInstance(d.items(), collections.abc.MappingView) diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py index c986a6b538f2ac..6edec0fe7b0c05 100644 --- a/Lib/test/test_dis.py +++ b/Lib/test/test_dis.py @@ -143,10 +143,10 @@ def bug708901(): %3d CALL 2 GET_ITER - >> FOR_ITER 2 (to 38) + >> FOR_ITER 4 (to 56) STORE_FAST 0 (res) -%3d JUMP_BACKWARD 4 (to 30) +%3d JUMP_BACKWARD 7 (to 42) %3d >> END_FOR LOAD_CONST 0 (None) @@ -168,13 +168,13 @@ def bug1333982(x=[]): %3d RESUME 0 %3d LOAD_ASSERTION_ERROR - LOAD_CONST 2 (<code object <listcomp> at 0x..., file "%s", line %d>) + LOAD_CONST 1 (<code object <listcomp> at 0x..., file "%s", line %d>) MAKE_FUNCTION 0 LOAD_FAST 0 (x) GET_ITER CALL 0 -%3d LOAD_CONST 3 (1) +%3d LOAD_CONST 2 (1) %3d BINARY_OP 0 (+) CALL 0 @@ -203,11 +203,11 @@ def bug42562(): # Extended arg followed by NOP code_bug_45757 = bytes([ - 0x90, 0x01, # EXTENDED_ARG 0x01 - 
0x09, 0xFF, # NOP 0xFF - 0x90, 0x01, # EXTENDED_ARG 0x01 - 0x64, 0x29, # LOAD_CONST 0x29 - 0x53, 0x00, # RETURN_VALUE 0x00 + 0x90, 0x01, 0x0, 0x0, # EXTENDED_ARG 0x01 + 0x09, 0xFF, 0x0, 0x0, # NOP 0xFF + 0x90, 0x01, 0x0, 0x0, # EXTENDED_ARG 0x01 + 0x64, 0x29, 0x0, 0x0, # LOAD_CONST 0x29 + 0x53, 0x00, 0x0, 0x0, # RETURN_VALUE 0x00 ]) dis_bug_45757 = """\ @@ -220,18 +220,18 @@ def bug42562(): # [255, 255, 255, 252] is -4 in a 4 byte signed integer bug46724 = bytes([ - opcode.EXTENDED_ARG, 255, - opcode.EXTENDED_ARG, 255, - opcode.EXTENDED_ARG, 255, - opcode.opmap['JUMP_FORWARD'], 252, + opcode.EXTENDED_ARG, 255, 0x0, 0x0, + opcode.EXTENDED_ARG, 255, 0x0, 0x0, + opcode.EXTENDED_ARG, 255, 0x0, 0x0, + opcode.opmap['JUMP_FORWARD'], 252, 0x0, 0x0, ]) dis_bug46724 = """\ - >> EXTENDED_ARG 255 + EXTENDED_ARG 255 EXTENDED_ARG 65535 - EXTENDED_ARG 16777215 - JUMP_FORWARD -4 (to 0) + >> EXTENDED_ARG 16777215 + JUMP_FORWARD -4 (to 8) """ _BIG_LINENO_FORMAT = """\ @@ -348,7 +348,7 @@ def bug42562(): BINARY_OP 13 (+=) STORE_NAME 0 (x) - 2 JUMP_BACKWARD 6 (to 8) + 2 JUMP_BACKWARD 11 (to 16) """ dis_traceback = """\ @@ -367,7 +367,7 @@ def bug42562(): %3d LOAD_GLOBAL 0 (Exception) CHECK_EXC_MATCH - POP_JUMP_IF_FALSE 23 (to 82) + POP_JUMP_IF_FALSE 37 (to 134) STORE_FAST 0 (e) %3d LOAD_FAST 0 (e) @@ -451,7 +451,7 @@ def _with(c): %3d >> PUSH_EXC_INFO WITH_EXCEPT_START - POP_JUMP_IF_TRUE 1 (to 46) + POP_JUMP_IF_TRUE 2 (to 84) RERAISE 2 >> POP_TOP POP_EXCEPT @@ -490,10 +490,10 @@ async def _asyncwith(c): BEFORE_ASYNC_WITH GET_AWAITABLE 1 LOAD_CONST 0 (None) - >> SEND 3 (to 22) + >> SEND 6 (to 44) YIELD_VALUE 3 RESUME 3 - JUMP_BACKWARD_NO_INTERRUPT 4 (to 14) + JUMP_BACKWARD_NO_INTERRUPT 8 (to 28) >> POP_TOP %3d LOAD_CONST 1 (1) @@ -505,10 +505,10 @@ async def _asyncwith(c): CALL 2 GET_AWAITABLE 2 LOAD_CONST 0 (None) - >> SEND 3 (to 56) + >> SEND 6 (to 104) YIELD_VALUE 2 RESUME 3 - JUMP_BACKWARD_NO_INTERRUPT 4 (to 48) + JUMP_BACKWARD_NO_INTERRUPT 8 (to 88) >> POP_TOP %3d LOAD_CONST 2 (2) @@ -517,19 +517,19 @@ async def _asyncwith(c): RETURN_VALUE %3d >> CLEANUP_THROW - JUMP_BACKWARD 24 (to 22) + JUMP_BACKWARD 44 (to 44) >> CLEANUP_THROW - JUMP_BACKWARD 9 (to 56) + JUMP_BACKWARD 18 (to 104) >> PUSH_EXC_INFO WITH_EXCEPT_START GET_AWAITABLE 2 LOAD_CONST 0 (None) - >> SEND 4 (to 92) + >> SEND 8 (to 176) YIELD_VALUE 6 RESUME 3 - JUMP_BACKWARD_NO_INTERRUPT 4 (to 82) + JUMP_BACKWARD_NO_INTERRUPT 8 (to 156) >> CLEANUP_THROW - >> POP_JUMP_IF_TRUE 1 (to 96) + >> POP_JUMP_IF_TRUE 2 (to 184) RERAISE 2 >> POP_TOP POP_EXCEPT @@ -543,8 +543,10 @@ async def _asyncwith(c): >> COPY 3 POP_EXCEPT RERAISE 1 + >> STOPITERATION_ERROR + RERAISE 1 ExceptionTable: -6 rows +12 rows """ % (_asyncwith.__code__.co_firstlineno, _asyncwith.__code__.co_firstlineno + 1, _asyncwith.__code__.co_firstlineno + 2, @@ -693,13 +695,13 @@ def foo(x): %3d RESUME 0 BUILD_LIST 0 LOAD_FAST 0 (.0) - >> FOR_ITER 7 (to 26) + >> FOR_ITER 13 (to 48) STORE_FAST 1 (z) LOAD_DEREF 2 (x) LOAD_FAST 1 (z) BINARY_OP 0 (+) LIST_APPEND 2 - JUMP_BACKWARD 9 (to 8) + JUMP_BACKWARD 16 (to 16) >> END_FOR RETURN_VALUE """ % (dis_nested_1, @@ -713,17 +715,17 @@ def load_test(x, y=0): return a, b dis_load_test_quickened_code = """\ -%3d 0 RESUME_QUICK 0 +%3d 0 RESUME 0 -%3d 2 LOAD_FAST__LOAD_FAST 0 (x) - 4 LOAD_FAST 1 (y) - 6 STORE_FAST__STORE_FAST 3 (b) - 8 STORE_FAST__LOAD_FAST 2 (a) +%3d 4 LOAD_FAST__LOAD_FAST 0 (x) + 8 LOAD_FAST 1 (y) + 12 STORE_FAST__STORE_FAST 3 (b) + 16 STORE_FAST__LOAD_FAST 2 (a) -%3d 10 LOAD_FAST__LOAD_FAST 2 (a) - 12 LOAD_FAST 3 (b) - 14 BUILD_TUPLE 2 - 
16 RETURN_VALUE +%3d 20 LOAD_FAST__LOAD_FAST 2 (a) + 24 LOAD_FAST 3 (b) + 28 BUILD_TUPLE 2 + 32 RETURN_VALUE """ % (load_test.__code__.co_firstlineno, load_test.__code__.co_firstlineno + 1, load_test.__code__.co_firstlineno + 2) @@ -733,22 +735,22 @@ def loop_test(): load_test(i) dis_loop_test_quickened_code = """\ -%3d RESUME_QUICK 0 +%3d RESUME 0 %3d BUILD_LIST 0 LOAD_CONST 1 ((1, 2, 3)) LIST_EXTEND 1 LOAD_CONST 2 (3) - BINARY_OP_ADAPTIVE 5 (*) + BINARY_OP 5 (*) GET_ITER - >> FOR_ITER_LIST 15 (to 50) + >> FOR_ITER_LIST 21 (to 78) STORE_FAST 0 (i) %3d LOAD_GLOBAL_MODULE 1 (NULL + load_test) LOAD_FAST 0 (i) CALL_PY_WITH_DEFAULTS 1 POP_TOP - JUMP_BACKWARD_QUICK 17 (to 16) + JUMP_BACKWARD 24 (to 30) %3d >> END_FOR LOAD_CONST 0 (None) @@ -764,17 +766,17 @@ def extended_arg_quick(): dis_extended_arg_quick_code = """\ %3d 0 RESUME 0 -%3d 2 LOAD_CONST 1 (Ellipsis) - 4 EXTENDED_ARG 1 - 6 UNPACK_EX 256 - 8 STORE_FAST 0 (_) - 10 STORE_FAST 0 (_) - 12 LOAD_CONST 0 (None) - 14 RETURN_VALUE +%3d 4 LOAD_CONST 1 (Ellipsis) + 8 EXTENDED_ARG 1 + 12 UNPACK_EX 256 + 16 STORE_FAST 0 (_) + 20 STORE_FAST 0 (_) + 24 LOAD_CONST 0 (None) + 28 RETURN_VALUE """% (extended_arg_quick.__code__.co_firstlineno, extended_arg_quick.__code__.co_firstlineno + 1,) -QUICKENING_WARMUP_DELAY = 8 +ADAPTIVE_WARMUP_DELAY = 2 class DisTestBase(unittest.TestCase): "Common utilities for DisTests and TestDisTraceback" @@ -968,20 +970,21 @@ def expected(count, w): %*d LOAD_CONST 1 (1) %*d BINARY_OP 0 (+) %*d STORE_FAST 0 (x) -''' % (w, 10*i + 2, w, 10*i + 4, w, 10*i + 6, w, 10*i + 10) +''' % (w, 18*i + 4, w, 18*i + 8, w, 18*i + 12, w, 18*i + 18) for i in range(count)] s += ['''\ 3 %*d LOAD_FAST 0 (x) %*d RETURN_VALUE -''' % (w, 10*count + 2, w, 10*count + 4)] +''' % (w, 18*count + 4, w, 18*count + 8)] s[1] = ' 2' + s[1][3:] return ''.join(s) for i in range(1, 5): - self.do_disassembly_test(func(i), expected(i, 4), True) - self.do_disassembly_test(func(999), expected(999, 4), True) - self.do_disassembly_test(func(1000), expected(1000, 5), True) + with self.subTest(count=i, w=4): + self.do_disassembly_test(func(i), expected(i, 4), True) + self.do_disassembly_test(func(554), expected(554, 4), True) + self.do_disassembly_test(func(555), expected(555, 5), True) def test_disassemble_str(self): self.do_disassembly_test(expr_str, dis_expr_str) @@ -1079,7 +1082,7 @@ def check(expected, **kwargs): check(dis_nested_2) @staticmethod - def code_quicken(f, times=QUICKENING_WARMUP_DELAY): + def code_quicken(f, times=ADAPTIVE_WARMUP_DELAY): for _ in range(times): f() @@ -1092,12 +1095,12 @@ def test_super_instructions(self): @cpython_only def test_binary_specialize(self): binary_op_quicken = """\ - 0 0 RESUME_QUICK 0 + 0 0 RESUME 0 - 1 2 LOAD_NAME 0 (a) - 4 LOAD_NAME 1 (b) - 6 %s - 10 RETURN_VALUE + 1 4 LOAD_NAME 0 (a) + 8 LOAD_NAME 1 (b) + 12 %s + 18 RETURN_VALUE """ co_int = compile('a + b', "<int>", "eval") self.code_quicken(lambda: exec(co_int, {}, {'a': 1, 'b': 2})) @@ -1110,12 +1113,12 @@ def test_binary_specialize(self): self.do_disassembly_compare(got, binary_op_quicken % "BINARY_OP_ADD_UNICODE 0 (+)", True) binary_subscr_quicken = """\ - 0 0 RESUME_QUICK 0 + 0 0 RESUME 0 - 1 2 LOAD_NAME 0 (a) - 4 LOAD_CONST 0 (0) - 6 %s - 16 RETURN_VALUE + 1 4 LOAD_NAME 0 (a) + 8 LOAD_CONST 0 (0) + 12 %s + 24 RETURN_VALUE """ co_list = compile('a[0]', "<list>", "eval") self.code_quicken(lambda: exec(co_list, {}, {'a': [0]})) @@ -1130,11 +1133,11 @@ def test_binary_specialize(self): @cpython_only def test_load_attr_specialize(self): load_attr_quicken = """\ - 
0 0 RESUME_QUICK 0 + 0 0 RESUME 0 - 1 2 LOAD_CONST 0 ('a') - 4 LOAD_ATTR_SLOT 0 (__class__) - 24 RETURN_VALUE + 1 4 LOAD_CONST 0 ('a') + 8 LOAD_ATTR_SLOT 0 (__class__) + 30 RETURN_VALUE """ co = compile("'a'.__class__", "", "eval") self.code_quicken(lambda: exec(co, {}, {})) @@ -1144,7 +1147,7 @@ def test_load_attr_specialize(self): @cpython_only def test_call_specialize(self): call_quicken = """\ - 0 RESUME_QUICK 0 + 0 RESUME 0 1 PUSH_NULL LOAD_NAME 0 (str) @@ -1190,7 +1193,7 @@ def test_show_caches(self): for quickened in (False, True): for adaptive in (False, True): with self.subTest(f"{quickened=}, {adaptive=}"): - if quickened and adaptive: + if adaptive: pattern = r"^(\w+: \d+)?$" else: pattern = r"^(\w+: 0)?$" @@ -1198,11 +1201,10 @@ def test_show_caches(self): for cache in caches: self.assertRegex(cache, pattern) total_caches = 23 - empty_caches = 8 if adaptive and quickened else total_caches + empty_caches = 8 self.assertEqual(caches.count(""), empty_caches) self.assertEqual(len(caches), total_caches) - class DisWithFileTests(DisTests): # Run the tests again, using the file arg instead of print @@ -1445,9 +1447,9 @@ def jumpy(): # End fodder for opinfo generation tests expected_outer_line = 1 _line_offset = outer.__code__.co_firstlineno - 1 -code_object_f = outer.__code__.co_consts[3] +code_object_f = outer.__code__.co_consts[1] expected_f_line = code_object_f.co_firstlineno - _line_offset -code_object_inner = code_object_f.co_consts[3] +code_object_inner = code_object_f.co_consts[1] expected_inner_line = code_object_inner.co_firstlineno - _line_offset expected_jumpy_line = 1 @@ -1484,21 +1486,21 @@ def _prepare_test_cases(): Instruction(opname='MAKE_CELL', opcode=135, arg=0, argval='a', argrepr='a', offset=0, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='MAKE_CELL', opcode=135, arg=1, argval='b', argrepr='b', offset=2, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='RESUME', opcode=151, arg=0, argval=0, argrepr='', offset=4, starts_line=1, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=7, argval=(3, 4), argrepr='(3, 4)', offset=6, starts_line=2, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=(3, 4), argrepr='(3, 4)', offset=6, starts_line=2, is_jump_target=False, positions=None), Instruction(opname='LOAD_CLOSURE', opcode=136, arg=0, argval='a', argrepr='a', offset=8, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_CLOSURE', opcode=136, arg=1, argval='b', argrepr='b', offset=10, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='BUILD_TUPLE', opcode=102, arg=2, argval=2, argrepr='', offset=12, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=code_object_f, argrepr=repr(code_object_f), offset=14, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=1, argval=code_object_f, argrepr=repr(code_object_f), offset=14, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='MAKE_FUNCTION', opcode=132, arg=9, argval=9, argrepr='defaults, closure', offset=16, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='STORE_FAST', opcode=125, arg=2, argval='f', argrepr='f', offset=18, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', 
argrepr='NULL + print', offset=20, starts_line=7, is_jump_target=False, positions=None), Instruction(opname='LOAD_DEREF', opcode=137, arg=0, argval='a', argrepr='a', offset=32, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_DEREF', opcode=137, arg=1, argval='b', argrepr='b', offset=34, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='', argrepr="''", offset=36, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=38, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval='', argrepr="''", offset=36, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=1, argrepr='1', offset=38, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='BUILD_LIST', opcode=103, arg=0, argval=0, argrepr='', offset=40, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='BUILD_MAP', opcode=105, arg=0, argval=0, argrepr='', offset=42, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=6, argval='Hello world!', argrepr="'Hello world!'", offset=44, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='Hello world!', argrepr="'Hello world!'", offset=44, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='CALL', opcode=171, arg=7, argval=7, argrepr='', offset=46, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=56, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_FAST', opcode=124, arg=2, argval='f', argrepr='f', offset=58, starts_line=8, is_jump_target=False, positions=None), @@ -1510,13 +1512,13 @@ def _prepare_test_cases(): Instruction(opname='MAKE_CELL', opcode=135, arg=0, argval='c', argrepr='c', offset=2, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='MAKE_CELL', opcode=135, arg=1, argval='d', argrepr='d', offset=4, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='RESUME', opcode=151, arg=0, argval=0, argrepr='', offset=6, starts_line=2, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval=(5, 6), argrepr='(5, 6)', offset=8, starts_line=3, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=(5, 6), argrepr='(5, 6)', offset=8, starts_line=3, is_jump_target=False, positions=None), Instruction(opname='LOAD_CLOSURE', opcode=136, arg=3, argval='a', argrepr='a', offset=10, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_CLOSURE', opcode=136, arg=4, argval='b', argrepr='b', offset=12, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_CLOSURE', opcode=136, arg=0, argval='c', argrepr='c', offset=14, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_CLOSURE', opcode=136, arg=1, argval='d', argrepr='d', offset=16, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='BUILD_TUPLE', opcode=102, arg=4, argval=4, argrepr='', offset=18, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', 
opcode=100, arg=3, argval=code_object_inner, argrepr=repr(code_object_inner), offset=20, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=1, argval=code_object_inner, argrepr=repr(code_object_inner), offset=20, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='MAKE_FUNCTION', opcode=132, arg=9, argval=9, argrepr='defaults, closure', offset=22, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='STORE_FAST', opcode=125, arg=2, argval='inner', argrepr='inner', offset=24, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='NULL + print', offset=26, starts_line=5, is_jump_target=False, positions=None), @@ -1548,119 +1550,119 @@ def _prepare_test_cases(): expected_opinfo_jumpy = [ Instruction(opname='RESUME', opcode=151, arg=0, argval=0, argrepr='', offset=0, starts_line=1, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='range', argrepr='NULL + range', offset=2, starts_line=3, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=1, argval=10, argrepr='10', offset=14, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=16, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='GET_ITER', opcode=68, arg=None, argval=None, argrepr='', offset=26, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='FOR_ITER', opcode=93, arg=30, argval=92, argrepr='to 92', offset=28, starts_line=None, is_jump_target=True, positions=None), - Instruction(opname='STORE_FAST', opcode=125, arg=0, argval='i', argrepr='i', offset=32, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=34, starts_line=4, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=46, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=48, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=58, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=60, starts_line=5, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=62, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='COMPARE_OP', opcode=107, arg=0, argval='<', argrepr='<', offset=64, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=1, argval=74, argrepr='to 74', offset=70, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='JUMP_BACKWARD', opcode=140, arg=23, argval=28, argrepr='to 28', offset=72, starts_line=6, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=74, starts_line=7, is_jump_target=True, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=6, argrepr='6', offset=76, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='COMPARE_OP', 
opcode=107, arg=4, argval='>', argrepr='>', offset=78, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_JUMP_IF_TRUE', opcode=115, arg=1, argval=88, argrepr='to 88', offset=84, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='JUMP_BACKWARD', opcode=140, arg=30, argval=28, argrepr='to 28', offset=86, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=88, starts_line=8, is_jump_target=True, positions=None), - Instruction(opname='JUMP_FORWARD', opcode=110, arg=14, argval=120, argrepr='to 120', offset=90, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='END_FOR', opcode=4, arg=None, argval=None, argrepr='', offset=92, starts_line=3, is_jump_target=True, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=94, starts_line=10, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='I can haz else clause?', argrepr="'I can haz else clause?'", offset=106, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=108, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=118, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST_CHECK', opcode=127, arg=0, argval='i', argrepr='i', offset=120, starts_line=11, is_jump_target=True, positions=None), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=35, argval=194, argrepr='to 194', offset=122, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=124, starts_line=12, is_jump_target=True, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=136, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=138, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=148, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=150, starts_line=13, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=152, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='BINARY_OP', opcode=122, arg=23, argval=23, argrepr='-=', offset=154, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='STORE_FAST', opcode=125, arg=0, argval='i', argrepr='i', offset=158, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=160, starts_line=14, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=6, argrepr='6', offset=162, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='COMPARE_OP', opcode=107, arg=4, argval='>', argrepr='>', offset=164, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=1, argval=174, argrepr='to 174', offset=170, starts_line=None, 
is_jump_target=False, positions=None), - Instruction(opname='JUMP_BACKWARD', opcode=140, arg=27, argval=120, argrepr='to 120', offset=172, starts_line=15, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=174, starts_line=16, is_jump_target=True, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=176, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='COMPARE_OP', opcode=107, arg=0, argval='<', argrepr='<', offset=178, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=1, argval=188, argrepr='to 188', offset=184, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='JUMP_FORWARD', opcode=110, arg=16, argval=220, argrepr='to 220', offset=186, starts_line=17, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=188, starts_line=11, is_jump_target=True, positions=None), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=1, argval=194, argrepr='to 194', offset=190, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='JUMP_BACKWARD', opcode=140, arg=35, argval=124, argrepr='to 124', offset=192, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=194, starts_line=19, is_jump_target=True, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=6, argval='Who let lolcatz into this test suite?', argrepr="'Who let lolcatz into this test suite?'", offset=206, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=208, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=218, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='NOP', opcode=9, arg=None, argval=None, argrepr='', offset=220, starts_line=20, is_jump_target=True, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=222, starts_line=21, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=7, argval=0, argrepr='0', offset=224, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='BINARY_OP', opcode=122, arg=11, argval=11, argrepr='/', offset=226, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=230, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=232, starts_line=25, is_jump_target=False, positions=None), - Instruction(opname='BEFORE_WITH', opcode=53, arg=None, argval=None, argrepr='', offset=234, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='STORE_FAST', opcode=125, arg=1, argval='dodgy', argrepr='dodgy', offset=236, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=238, starts_line=26, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=8, argval='Never reach this', argrepr="'Never reach this'", offset=250, starts_line=None, 
is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=252, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=262, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=264, starts_line=25, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=266, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=268, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=2, argval=2, argrepr='', offset=270, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=280, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=282, starts_line=28, is_jump_target=True, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=294, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=296, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=306, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=308, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=310, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='PUSH_EXC_INFO', opcode=35, arg=None, argval=None, argrepr='', offset=312, starts_line=25, is_jump_target=False, positions=None), - Instruction(opname='WITH_EXCEPT_START', opcode=49, arg=None, argval=None, argrepr='', offset=314, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_JUMP_IF_TRUE', opcode=115, arg=1, argval=320, argrepr='to 320', offset=316, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RERAISE', opcode=119, arg=2, argval=2, argrepr='', offset=318, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=320, starts_line=None, is_jump_target=True, positions=None), - Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=322, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=324, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='range', argrepr='NULL + range', offset=4, starts_line=3, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=1, argval=10, argrepr='10', offset=18, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=22, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='GET_ITER', opcode=68, arg=None, 
argval=None, argrepr='', offset=34, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='FOR_ITER', opcode=93, arg=47, argval=138, argrepr='to 138', offset=38, starts_line=None, is_jump_target=True, positions=None), + Instruction(opname='STORE_FAST', opcode=125, arg=0, argval='i', argrepr='i', offset=44, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=48, starts_line=4, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=62, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=66, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=78, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=82, starts_line=5, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=86, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='COMPARE_OP', opcode=107, arg=0, argval='<', argrepr='<', offset=90, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=2, argval=106, argrepr='to 106', offset=98, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='JUMP_BACKWARD', opcode=140, arg=34, argval=38, argrepr='to 38', offset=102, starts_line=6, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=106, starts_line=7, is_jump_target=True, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=6, argrepr='6', offset=110, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='COMPARE_OP', opcode=107, arg=4, argval='>', argrepr='>', offset=114, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_JUMP_IF_TRUE', opcode=115, arg=2, argval=130, argrepr='to 130', offset=122, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='JUMP_BACKWARD', opcode=140, arg=46, argval=38, argrepr='to 38', offset=126, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=130, starts_line=8, is_jump_target=True, positions=None), + Instruction(opname='JUMP_FORWARD', opcode=110, arg=19, argval=176, argrepr='to 176', offset=134, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='END_FOR', opcode=4, arg=None, argval=None, argrepr='', offset=138, starts_line=3, is_jump_target=True, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=142, starts_line=10, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='I can haz else clause?', argrepr="'I can haz else clause?'", offset=156, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=160, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=172, starts_line=None, is_jump_target=False, positions=None), 
+ Instruction(opname='LOAD_FAST_CHECK', opcode=127, arg=0, argval='i', argrepr='i', offset=176, starts_line=11, is_jump_target=True, positions=None), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=56, argval=296, argrepr='to 296', offset=180, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=184, starts_line=12, is_jump_target=True, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=198, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=202, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=214, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=218, starts_line=13, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=222, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='BINARY_OP', opcode=122, arg=23, argval=23, argrepr='-=', offset=226, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='STORE_FAST', opcode=125, arg=0, argval='i', argrepr='i', offset=232, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=236, starts_line=14, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=6, argrepr='6', offset=240, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='COMPARE_OP', opcode=107, arg=4, argval='>', argrepr='>', offset=244, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=2, argval=260, argrepr='to 260', offset=252, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='JUMP_BACKWARD', opcode=140, arg=42, argval=176, argrepr='to 176', offset=256, starts_line=15, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=260, starts_line=16, is_jump_target=True, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=264, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='COMPARE_OP', opcode=107, arg=0, argval='<', argrepr='<', offset=268, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=2, argval=284, argrepr='to 284', offset=276, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='JUMP_FORWARD', opcode=110, arg=23, argval=330, argrepr='to 330', offset=280, starts_line=17, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=284, starts_line=11, is_jump_target=True, positions=None), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=2, argval=296, argrepr='to 296', offset=288, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='JUMP_BACKWARD', opcode=140, arg=56, argval=184, argrepr='to 184', offset=292, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', 
offset=296, starts_line=19, is_jump_target=True, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=6, argval='Who let lolcatz into this test suite?', argrepr="'Who let lolcatz into this test suite?'", offset=310, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=314, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=326, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='JUMP_BACKWARD', opcode=140, arg=24, argval=282, argrepr='to 282', offset=328, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='COPY', opcode=120, arg=3, argval=3, argrepr='', offset=330, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=332, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RERAISE', opcode=119, arg=1, argval=1, argrepr='', offset=334, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='PUSH_EXC_INFO', opcode=35, arg=None, argval=None, argrepr='', offset=336, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=4, argval='ZeroDivisionError', argrepr='ZeroDivisionError', offset=338, starts_line=22, is_jump_target=False, positions=None), - Instruction(opname='CHECK_EXC_MATCH', opcode=36, arg=None, argval=None, argrepr='', offset=350, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=16, argval=386, argrepr='to 386', offset=352, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=354, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=356, starts_line=23, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=9, argval='Here we go, here we go, here we go...', argrepr="'Here we go, here we go, here we go...'", offset=368, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=370, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=380, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=382, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='JUMP_BACKWARD', opcode=140, arg=52, argval=282, argrepr='to 282', offset=384, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RERAISE', opcode=119, arg=0, argval=0, argrepr='', offset=386, starts_line=22, is_jump_target=True, positions=None), - Instruction(opname='COPY', opcode=120, arg=3, argval=3, argrepr='', offset=388, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=390, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RERAISE', opcode=119, arg=1, argval=1, argrepr='', offset=392, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='PUSH_EXC_INFO', opcode=35, 
arg=None, argval=None, argrepr='', offset=394, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=396, starts_line=28, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=408, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=410, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=420, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RERAISE', opcode=119, arg=0, argval=0, argrepr='', offset=422, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='COPY', opcode=120, arg=3, argval=3, argrepr='', offset=424, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=426, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RERAISE', opcode=119, arg=1, argval=1, argrepr='', offset=428, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='NOP', opcode=9, arg=None, argval=None, argrepr='', offset=330, starts_line=20, is_jump_target=True, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=334, starts_line=21, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=7, argval=0, argrepr='0', offset=338, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='BINARY_OP', opcode=122, arg=11, argval=11, argrepr='/', offset=342, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=348, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=352, starts_line=25, is_jump_target=False, positions=None), + Instruction(opname='BEFORE_WITH', opcode=53, arg=None, argval=None, argrepr='', offset=356, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='STORE_FAST', opcode=125, arg=1, argval='dodgy', argrepr='dodgy', offset=360, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=364, starts_line=26, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=8, argval='Never reach this', argrepr="'Never reach this'", offset=378, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=382, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=394, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=398, starts_line=25, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=402, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=406, starts_line=None, is_jump_target=False, 
positions=None), + Instruction(opname='CALL', opcode=171, arg=2, argval=2, argrepr='', offset=410, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=422, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=426, starts_line=28, is_jump_target=True, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=440, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=444, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=456, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=460, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=464, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='PUSH_EXC_INFO', opcode=35, arg=None, argval=None, argrepr='', offset=468, starts_line=25, is_jump_target=False, positions=None), + Instruction(opname='WITH_EXCEPT_START', opcode=49, arg=None, argval=None, argrepr='', offset=472, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_JUMP_IF_TRUE', opcode=115, arg=2, argval=484, argrepr='to 484', offset=476, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RERAISE', opcode=119, arg=2, argval=2, argrepr='', offset=480, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=484, starts_line=None, is_jump_target=True, positions=None), + Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=488, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=492, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=496, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='JUMP_BACKWARD', opcode=140, arg=39, argval=426, argrepr='to 426', offset=500, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='COPY', opcode=120, arg=3, argval=3, argrepr='', offset=504, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=508, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RERAISE', opcode=119, arg=1, argval=1, argrepr='', offset=512, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='PUSH_EXC_INFO', opcode=35, arg=None, argval=None, argrepr='', offset=516, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=4, argval='ZeroDivisionError', argrepr='ZeroDivisionError', offset=520, starts_line=22, is_jump_target=False, positions=None), + Instruction(opname='CHECK_EXC_MATCH', opcode=36, arg=None, argval=None, argrepr='', offset=534, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_JUMP_IF_FALSE', 
opcode=114, arg=23, argval=588, argrepr='to 588', offset=538, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=542, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=546, starts_line=23, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=9, argval='Here we go, here we go, here we go...', argrepr="'Here we go, here we go, here we go...'", offset=560, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=564, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=576, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=580, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='JUMP_BACKWARD', opcode=140, arg=81, argval=426, argrepr='to 426', offset=584, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RERAISE', opcode=119, arg=0, argval=0, argrepr='', offset=588, starts_line=22, is_jump_target=True, positions=None), + Instruction(opname='COPY', opcode=120, arg=3, argval=3, argrepr='', offset=592, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=596, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RERAISE', opcode=119, arg=1, argval=1, argrepr='', offset=600, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='PUSH_EXC_INFO', opcode=35, arg=None, argval=None, argrepr='', offset=604, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=608, starts_line=28, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=622, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=626, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=638, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RERAISE', opcode=119, arg=0, argval=0, argrepr='', offset=642, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='COPY', opcode=120, arg=3, argval=3, argrepr='', offset=646, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=650, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RERAISE', opcode=119, arg=1, argval=1, argrepr='', offset=654, starts_line=None, is_jump_target=False, positions=None), ] # One last piece of inspect fodder to check the default line number handling @@ -1675,8 +1677,8 @@ def simple(): pass class InstructionTestCase(BytecodeTestCase): def assertInstructionsEqual(self, instrs_1, instrs_2, /): - instrs_1 = [instr_1._replace(positions=None) for instr_1 in instrs_1] - instrs_2 = [instr_2._replace(positions=None) for instr_2 in instrs_2] + instrs_1 = 
[instr_1._replace(positions=None, offset=0) for instr_1 in instrs_1] + instrs_2 = [instr_2._replace(positions=None, offset=0) for instr_2 in instrs_2] self.assertEqual(instrs_1, instrs_2) class InstructionTests(InstructionTestCase): @@ -1764,20 +1766,17 @@ def roots(a, b, c): if d: yield (-b + cmath.sqrt(d)) / (2 * a) code = roots.__code__ - ops = code.co_code[::2] - cache_opcode = opcode.opmap["CACHE"] - caches = sum(op == cache_opcode for op in ops) - non_caches = len(ops) - caches + # Make sure we have "lots of caches". If not, roots should be changed: - assert 1 / 3 <= caches / non_caches, "this test needs more caches!" + num_insts = len(list(dis.get_instructions(code, show_caches=False))) + with_caches = len(list(dis.get_instructions(code, show_caches=True))) + caches = with_caches - num_insts + assert 1 / 3 <= caches / num_insts, "this test needs more caches!" for show_caches in (False, True): for adaptive in (False, True): with self.subTest(f"{adaptive=}, {show_caches=}"): - co_positions = [ - positions - for op, positions in zip(ops, code.co_positions(), strict=True) - if show_caches or op != cache_opcode - ] + co_positions = list(dis._get_co_positions( + code, show_caches=show_caches)) dis_positions = [ instruction.positions for instruction in dis.get_instructions( @@ -1847,7 +1846,8 @@ def test_from_traceback_dis(self): @requires_debug_ranges() def test_bytecode_co_positions(self): bytecode = dis.Bytecode("a=1") - for instr, positions in zip(bytecode, bytecode.codeobj.co_positions()): + expected = list(bytecode.codeobj.co_positions())[::2] # skip oparg2, oparg3 lines + for instr, positions in zip(bytecode, expected): assert instr.positions == positions class TestBytecodeTestCase(BytecodeTestCase): diff --git a/Lib/test/test_distutils.py b/Lib/test/test_distutils.py deleted file mode 100644 index 28320fb5c0bfd1..00000000000000 --- a/Lib/test/test_distutils.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Tests for distutils. - -The tests for distutils are defined in the distutils.tests package; -the test_suite() function there returns a test suite that's ready to -be run. -""" - -import unittest -from test import support -from test.support import warnings_helper - -with warnings_helper.check_warnings( - ("The distutils package is deprecated", DeprecationWarning), quiet=True): - - import distutils.tests - - -def load_tests(*_): - # used by unittest - return distutils.tests.test_suite() - - -def tearDownModule(): - support.reap_children() - -if support.check_sanitizer(address=True): - raise unittest.SkipTest("Exposes ASAN flakiness in GitHub CI") - -if __name__ == "__main__": - unittest.main() diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index fa9815c681e0ae..2dda7ccf7bf80c 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -346,33 +346,37 @@ def test_simple_initialization_api(self): out, err = self.run_embedded_interpreter("test_repeated_simple_init") self.assertEqual(out, 'Finalized\n' * INIT_LOOPS) - def test_quickened_static_code_gets_unquickened_at_Py_FINALIZE(self): + def test_specialized_static_code_gets_unspecialized_at_Py_FINALIZE(self): # https://github.com/python/cpython/issues/92031 - # Do these imports outside of the code string to avoid using - # importlib too much from within the code string, so that - # _handle_fromlist doesn't get quickened until we intend it to. 
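For context on the hunk below: the removed lines peeked at the raw `_co_code_adaptive` byte pairs, while the replacement code (further down in this same hunk) asks dis for the adaptive instruction stream instead. A minimal standalone sketch of that check, under the assumptions that `opcode._specialized_instructions` stays a private list and that two calls (the new ADAPTIVE_WARMUP_DELAY) are enough warmup in this tree; `add` is an illustrative function, not part of the test:

    import dis
    import opcode

    def is_specialized(func):
        # Same idea as the helper added in this hunk: any specialized,
        # non-super instruction name in the adaptive bytecode counts.
        return any(
            instr.opname in opcode._specialized_instructions
            and "__" not in instr.opname
            for instr in dis.get_instructions(func, adaptive=True)
        )

    def add(a, b):
        return a + b

    for _ in range(2):   # matches ADAPTIVE_WARMUP_DELAY = 2 above
        add(1, 2)
    # add() should now report specialized instructions (e.g. BINARY_OP_ADD_INT).
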
- from dis import _all_opmap - resume = _all_opmap["RESUME"] - resume_quick = _all_opmap["RESUME_QUICK"] - from test.test_dis import QUICKENING_WARMUP_DELAY - - code = textwrap.dedent(f"""\ + code = textwrap.dedent("""\ + import dis import importlib._bootstrap + import opcode + import test.test_dis + + def is_specialized(f): + for instruction in dis.get_instructions(f, adaptive=True): + opname = instruction.opname + if ( + opname in opcode._specialized_instructions + # Exclude superinstructions: + and "__" not in opname + ): + return True + return False + func = importlib._bootstrap._handle_fromlist - code = func.__code__ - # Assert initially unquickened. - # Use sets to account for byte order. - if set(code._co_code_adaptive[:2]) != set([{resume}, 0]): - raise AssertionError() + # "copy" the code to un-specialize it: + func.__code__ = func.__code__.replace() + + assert not is_specialized(func), "specialized instructions found" - for i in range({QUICKENING_WARMUP_DELAY}): + for i in range(test.test_dis.ADAPTIVE_WARMUP_DELAY): func(importlib._bootstrap, ["x"], lambda *args: None) - # Assert quickening worked - if set(code._co_code_adaptive[:2]) != set([{resume_quick}, 0]): - raise AssertionError() + assert is_specialized(func), "no specialized instructions found" print("Tests passed") """) @@ -1484,17 +1488,11 @@ def test_init_pyvenv_cfg(self): if not MS_WINDOWS: paths[-1] = lib_dynload else: - # Include DLLs directory as well - paths.insert(1, '.\\DLLs') - for index, path in enumerate(paths): - if index == 0: - # Because we copy the DLLs into tmpdir as well, the zip file - # entry in sys.path will be there. For a regular venv, it will - # usually be in the home directory. - paths[index] = os.path.join(tmpdir, os.path.basename(path)) - else: - paths[index] = os.path.join(pyvenv_home, os.path.basename(path)) - paths[-1] = pyvenv_home + paths = [ + os.path.join(tmpdir, os.path.basename(paths[0])), + pyvenv_home, + os.path.join(pyvenv_home, "Lib"), + ] executable = self.test_exe base_executable = os.path.join(pyvenv_home, os.path.basename(executable)) @@ -1511,12 +1509,12 @@ def test_init_pyvenv_cfg(self): config['base_prefix'] = pyvenv_home config['prefix'] = pyvenv_home config['stdlib_dir'] = os.path.join(pyvenv_home, 'Lib') - config['use_frozen_modules'] = not support.Py_DEBUG + config['use_frozen_modules'] = int(not support.Py_DEBUG) else: # cannot reliably assume stdlib_dir here because it # depends too much on our build. 
But it ought to be found config['stdlib_dir'] = self.IGNORE_CONFIG - config['use_frozen_modules'] = not support.Py_DEBUG + config['use_frozen_modules'] = int(not support.Py_DEBUG) env = self.copy_paths_by_env(config) self.check_all_configs("test_init_compat_config", config, diff --git a/Lib/test/test_enum.py b/Lib/test/test_enum.py index f50017d916f5e3..d876c8e5fb7798 100644 --- a/Lib/test/test_enum.py +++ b/Lib/test/test_enum.py @@ -14,7 +14,7 @@ from enum import Enum, IntEnum, StrEnum, EnumType, Flag, IntFlag, unique, auto from enum import STRICT, CONFORM, EJECT, KEEP, _simple_enum, _test_simple_enum from enum import verify, UNIQUE, CONTINUOUS, NAMED_FLAGS, ReprEnum -from enum import member, nonmember +from enum import member, nonmember, _iter_bits_lsb from io import StringIO from pickle import dumps, loads, PicklingError, HIGHEST_PROTOCOL from test import support @@ -174,6 +174,10 @@ def test_is_private(self): for name in self.sunder_names + self.dunder_names + self.random_names: self.assertFalse(enum._is_private('MyEnum', name), '%r is a private name?') + def test_iter_bits_lsb(self): + self.assertEqual(list(_iter_bits_lsb(7)), [1, 2, 4]) + self.assertRaisesRegex(ValueError, '-8 is not a positive integer', list, _iter_bits_lsb(-8)) + # for subclassing tests @@ -1317,6 +1321,21 @@ def test_programmatic_function_type_from_subclass_with_start(self): self.assertIn(e, MinorEnum) self.assertIs(type(e), MinorEnum) + def test_programmatic_function_is_value_call(self): + class TwoPart(Enum): + ONE = 1, 1.0 + TWO = 2, 2.0 + THREE = 3, 3.0 + self.assertRaisesRegex(ValueError, '1 is not a valid .*TwoPart', TwoPart, 1) + self.assertIs(TwoPart((1, 1.0)), TwoPart.ONE) + self.assertIs(TwoPart(1, 1.0), TwoPart.ONE) + class ThreePart(Enum): + ONE = 1, 1.0, 'one' + TWO = 2, 2.0, 'two' + THREE = 3, 3.0, 'three' + self.assertIs(ThreePart((3, 3.0, 'three')), ThreePart.THREE) + self.assertIs(ThreePart(3, 3.0, 'three'), ThreePart.THREE) + def test_intenum_from_bytes(self): self.assertIs(IntStooges.from_bytes(b'\x00\x03', 'big'), IntStooges.MOE) with self.assertRaises(ValueError): @@ -1459,7 +1478,7 @@ class MoreColor(Color): class EvenMoreColor(Color, IntEnum): chartruese = 7 # - with self.assertRaisesRegex(TypeError, "<enum .Foo.> cannot extend <enum .Color.>"): + with self.assertRaisesRegex(ValueError, r"\(.Foo., \(.pink., .black.\)\) is not a valid .*Color"): Color('Foo', ('pink', 'black')) def test_exclude_methods(self): @@ -2698,17 +2717,67 @@ def upper(self): def test_repr_with_dataclass(self): "ensure dataclass-mixin has correct repr()" - from dataclasses import dataclass - @dataclass + # + # check overridden dataclass __repr__ is used + # + from dataclasses import dataclass, field + @dataclass(repr=False) class Foo: __qualname__ = 'Foo' a: int + def __repr__(self): + return 'ha hah!' 
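The assertions below pin down which `__repr__` an Enum with a dataclass mixin uses for the value part of a member's repr: an explicitly defined or inherited `__repr__` wins, an auto-generated dataclass repr is reduced to its field list, and with nothing provided the default object repr shows through. A small sketch of the field-list case, with a hypothetical `Point` mixin that is not part of the test:

    from dataclasses import dataclass
    from enum import Enum

    @dataclass
    class Point:              # hypothetical mixin
        x: int
        y: int

    class Spot(Point, Enum):
        ORIGIN = 0, 0

    # Auto-generated dataclass repr -> only the field list appears in the
    # member repr, roughly: <Spot.ORIGIN: x=0, y=0>
    print(repr(Spot.ORIGIN))
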
class Entries(Foo, Enum): ENTRY1 = 1 self.assertTrue(isinstance(Entries.ENTRY1, Foo)) self.assertTrue(Entries._member_type_ is Foo, Entries._member_type_) self.assertTrue(Entries.ENTRY1.value == Foo(1), Entries.ENTRY1.value) - self.assertEqual(repr(Entries.ENTRY1), '<Entries.ENTRY1: Foo(a=1)>') + self.assertEqual(repr(Entries.ENTRY1), '<Entries.ENTRY1: ha hah!>') + # + # check auto-generated dataclass __repr__ is not used + # + @dataclass + class CreatureDataMixin: + __qualname__ = 'CreatureDataMixin' + size: str + legs: int + tail: bool = field(repr=False, default=True) + class Creature(CreatureDataMixin, Enum): + __qualname__ = 'Creature' + BEETLE = ('small', 6) + DOG = ('medium', 4) + self.assertEqual(repr(Creature.DOG), "<Creature.DOG: size='medium', legs=4>") + # + # check inherited repr used + # + class Huh: + def __repr__(self): + return 'inherited' + @dataclass(repr=False) + class CreatureDataMixin(Huh): + __qualname__ = 'CreatureDataMixin' + size: str + legs: int + tail: bool = field(repr=False, default=True) + class Creature(CreatureDataMixin, Enum): + __qualname__ = 'Creature' + BEETLE = ('small', 6) + DOG = ('medium', 4) + self.assertEqual(repr(Creature.DOG), "<Creature.DOG: inherited>") + # + # check default object.__repr__ used if nothing provided + # + @dataclass(repr=False) + class CreatureDataMixin: + __qualname__ = 'CreatureDataMixin' + size: str + legs: int + tail: bool = field(repr=False, default=True) + class Creature(CreatureDataMixin, Enum): + __qualname__ = 'Creature' + BEETLE = ('small', 6) + DOG = ('medium', 4) + self.assertRegex(repr(Creature.DOG), "<Creature.DOG: .*CreatureDataMixin object at .*>") def test_repr_with_init_data_type_mixin(self): # non-data_type is a mixin that doesn't define __new__ @@ -2772,6 +2841,19 @@ class MyIntFlag(IntFlag): self.assertEqual(deep, flags) self.assertEqual(copied.value, 1 | 2 | 8) + def test_namedtuple_as_value(self): + from collections import namedtuple + TTuple = namedtuple('TTuple', 'id a blist') + class NTEnum(Enum): + NONE = TTuple(0, 0, []) + A = TTuple(1, 2, [4]) + B = TTuple(2, 4, [0, 1, 2]) + self.assertEqual(repr(NTEnum.NONE), "<NTEnum.NONE: TTuple(id=0, a=0, blist=[])>") + self.assertEqual(NTEnum.NONE.value, TTuple(id=0, a=0, blist=[])) + self.assertEqual( + [x.value for x in NTEnum], + [TTuple(id=0, a=0, blist=[]), TTuple(id=1, a=2, blist=[4]), TTuple(id=2, a=4, blist=[0, 1, 2])], + ) class TestOrder(unittest.TestCase): "test usage of the `_order_` attribute" @@ -3960,6 +4042,16 @@ class Sillier(IntEnum): triple = 3 value = 4 + def test_negative_alias(self): + @verify(NAMED_FLAGS) + class Color(Flag): + RED = 1 + GREEN = 2 + BLUE = 4 + WHITE = -1 + # no error means success + + class TestInternals(unittest.TestCase): sunder_names = '_bad_', '_good_', '_what_ho_' @@ -4116,6 +4208,50 @@ class Dupes(Enum): third = auto() self.assertEqual([Dupes.first, Dupes.second, Dupes.third], list(Dupes)) + def test_multiple_auto_on_line(self): + class Huh(Enum): + ONE = auto() + TWO = auto(), auto() + THREE = auto(), auto(), auto() + self.assertEqual(Huh.ONE.value, 1) + self.assertEqual(Huh.TWO.value, (2, 3)) + self.assertEqual(Huh.THREE.value, (4, 5, 6)) + # + class Hah(Enum): + def __new__(cls, value, abbr=None): + member = object.__new__(cls) + member._value_ = value + member.abbr = abbr or value[:3].lower() + return member + def _generate_next_value_(name, start, count, last): + return name + # + MONDAY = auto() + TUESDAY = auto() + WEDNESDAY = auto(), 'WED' + THURSDAY = auto(), 'Thu' + FRIDAY = auto() + 
self.assertEqual(Hah.MONDAY.value, 'MONDAY') + self.assertEqual(Hah.MONDAY.abbr, 'mon') + self.assertEqual(Hah.TUESDAY.value, 'TUESDAY') + self.assertEqual(Hah.TUESDAY.abbr, 'tue') + self.assertEqual(Hah.WEDNESDAY.value, 'WEDNESDAY') + self.assertEqual(Hah.WEDNESDAY.abbr, 'WED') + self.assertEqual(Hah.THURSDAY.value, 'THURSDAY') + self.assertEqual(Hah.THURSDAY.abbr, 'Thu') + self.assertEqual(Hah.FRIDAY.value, 'FRIDAY') + self.assertEqual(Hah.FRIDAY.abbr, 'fri') + # + class Huh(Enum): + def _generate_next_value_(name, start, count, last): + return count+1 + ONE = auto() + TWO = auto(), auto() + THREE = auto(), auto(), auto() + self.assertEqual(Huh.ONE.value, 1) + self.assertEqual(Huh.TWO.value, (2, 2)) + self.assertEqual(Huh.THREE.value, (3, 3, 3)) + class TestEnumTypeSubclassing(unittest.TestCase): pass @@ -4123,7 +4259,7 @@ class TestEnumTypeSubclassing(unittest.TestCase): Help on class Color in module %s: class Color(enum.Enum) - | Color(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None) + | Color(value, names=None, *values, module=None, qualname=None, type=None, start=1, boundary=None) | | Method resolution order: | Color @@ -4179,7 +4315,7 @@ class Color(enum.Enum) Help on class Color in module %s: class Color(enum.Enum) - | Color(value, names=None, *, module=None, qualname=None, type=None, start=1) + | Color(value, names=None, *values, module=None, qualname=None, type=None, start=1) | | Method resolution order: | Color @@ -4332,10 +4468,16 @@ class SimpleColor: CYAN = 1 MAGENTA = 2 YELLOW = 3 + @bltns.property + def zeroth(self): + return 'zeroed %s' % self.name class CheckedColor(Enum): CYAN = 1 MAGENTA = 2 YELLOW = 3 + @bltns.property + def zeroth(self): + return 'zeroed %s' % self.name self.assertTrue(_test_simple_enum(CheckedColor, SimpleColor) is None) SimpleColor.MAGENTA._value_ = 9 self.assertRaisesRegex( @@ -4424,11 +4566,6 @@ class Quadruple(Enum): COMPLEX_A = 2j COMPLEX_B = 3j -class _ModuleWrapper: - """We use this class as a namespace for swapping modules.""" - def __init__(self, module): - self.__dict__.update(module.__dict__) - class TestConvert(unittest.TestCase): def tearDown(self): # Reset the module-level test variables to their original integer @@ -4468,12 +4605,6 @@ def test_convert_int(self): self.assertEqual(test_type.CONVERT_TEST_NAME_D, 5) self.assertEqual(test_type.CONVERT_TEST_NAME_E, 5) # Ensure that test_type only picked up names matching the filter. - int_dir = dir(int) + [ - 'CONVERT_TEST_NAME_A', 'CONVERT_TEST_NAME_B', 'CONVERT_TEST_NAME_C', - 'CONVERT_TEST_NAME_D', 'CONVERT_TEST_NAME_E', 'CONVERT_TEST_NAME_F', - 'CONVERT_TEST_SIGABRT', 'CONVERT_TEST_SIGIOT', - 'CONVERT_TEST_EIO', 'CONVERT_TEST_EBUS', - ] extra = [name for name in dir(test_type) if name not in enum_dir(test_type)] missing = [name for name in enum_dir(test_type) if name not in dir(test_type)] self.assertEqual( @@ -4515,7 +4646,6 @@ def test_convert_str(self): self.assertEqual(test_type.CONVERT_STR_TEST_1, 'hello') self.assertEqual(test_type.CONVERT_STR_TEST_2, 'goodbye') # Ensure that test_type only picked up names matching the filter. 
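The "names matching the filter" mentioned in the comment above come from the private `enum.IntEnum._convert_` helper, which builds an enum out of module-level constants. A hedged sketch of that usage; the constants here are illustrative stand-ins for the test's CONVERT_TEST_NAME_* globals, and since `_convert_` is private its exact signature may vary between releases:

    import enum

    CONVERT_TEST_NAME_A = 5   # illustrative module-level constants
    CONVERT_TEST_NAME_B = 6

    Converted = enum.IntEnum._convert_(
        'Converted', __name__,
        filter=lambda name: name.startswith('CONVERT_TEST_'))
    # Converted ends up with exactly the names the filter accepted.
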
- str_dir = dir(str) + ['CONVERT_STR_TEST_1', 'CONVERT_STR_TEST_2'] extra = [name for name in dir(test_type) if name not in enum_dir(test_type)] missing = [name for name in enum_dir(test_type) if name not in dir(test_type)] self.assertEqual( @@ -4583,8 +4713,6 @@ def member_dir(member): allowed.add(name) return sorted(allowed) -missing = object() - if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_except_star.py b/Lib/test/test_except_star.py index dbe8eff32924ed..9de72dbd5a3264 100644 --- a/Lib/test/test_except_star.py +++ b/Lib/test/test_except_star.py @@ -1000,5 +1000,204 @@ def test_exc_info_restored(self): self.assertEqual(sys.exc_info(), (None, None, None)) +class TestExceptStar_WeirdLeafExceptions(ExceptStarTest): + # Test that except* works when leaf exceptions are + # unhashable or have a bad custom __eq__ + + class UnhashableExc(ValueError): + __hash__ = None + + class AlwaysEqualExc(ValueError): + def __eq__(self, other): + return True + + class NeverEqualExc(ValueError): + def __eq__(self, other): + return False + + class BrokenEqualExc(ValueError): + def __eq__(self, other): + raise RuntimeError() + + def setUp(self): + self.bad_types = [self.UnhashableExc, + self.AlwaysEqualExc, + self.NeverEqualExc, + self.BrokenEqualExc] + + def except_type(self, eg, type): + match, rest = None, None + try: + try: + raise eg + except* type as e: + match = e + except Exception as e: + rest = e + return match, rest + + def test_catch_unhashable_leaf_exception(self): + for Bad in self.bad_types: + with self.subTest(Bad): + eg = ExceptionGroup("eg", [TypeError(1), Bad(2)]) + match, rest = self.except_type(eg, Bad) + self.assertExceptionIsLike( + match, ExceptionGroup("eg", [Bad(2)])) + self.assertExceptionIsLike( + rest, ExceptionGroup("eg", [TypeError(1)])) + + def test_propagate_unhashable_leaf(self): + for Bad in self.bad_types: + with self.subTest(Bad): + eg = ExceptionGroup("eg", [TypeError(1), Bad(2)]) + match, rest = self.except_type(eg, TypeError) + self.assertExceptionIsLike( + match, ExceptionGroup("eg", [TypeError(1)])) + self.assertExceptionIsLike( + rest, ExceptionGroup("eg", [Bad(2)])) + + def test_catch_nothing_unhashable_leaf(self): + for Bad in self.bad_types: + with self.subTest(Bad): + eg = ExceptionGroup("eg", [TypeError(1), Bad(2)]) + match, rest = self.except_type(eg, OSError) + self.assertIsNone(match) + self.assertExceptionIsLike(rest, eg) + + def test_catch_everything_unhashable_leaf(self): + for Bad in self.bad_types: + with self.subTest(Bad): + eg = ExceptionGroup("eg", [TypeError(1), Bad(2)]) + match, rest = self.except_type(eg, Exception) + self.assertExceptionIsLike(match, eg) + self.assertIsNone(rest) + + def test_reraise_unhashable_leaf(self): + for Bad in self.bad_types: + with self.subTest(Bad): + eg = ExceptionGroup( + "eg", [TypeError(1), Bad(2), ValueError(3)]) + + try: + try: + raise eg + except* TypeError: + pass + except* Bad: + raise + except Exception as e: + exc = e + + self.assertExceptionIsLike( + exc, ExceptionGroup("eg", [Bad(2), ValueError(3)])) + + +class TestExceptStar_WeirdExceptionGroupSubclass(ExceptStarTest): + # Test that except* works with exception groups that are + # unhashable or have a bad custom __eq__ + + class UnhashableEG(ExceptionGroup): + __hash__ = None + + def derive(self, excs): + return type(self)(self.message, excs) + + class AlwaysEqualEG(ExceptionGroup): + def __eq__(self, other): + return True + + def derive(self, excs): + return type(self)(self.message, excs) + + class NeverEqualEG(ExceptionGroup): + 
def __eq__(self, other): + return False + + def derive(self, excs): + return type(self)(self.message, excs) + + class BrokenEqualEG(ExceptionGroup): + def __eq__(self, other): + raise RuntimeError() + + def derive(self, excs): + return type(self)(self.message, excs) + + def setUp(self): + self.bad_types = [self.UnhashableEG, + self.AlwaysEqualEG, + self.NeverEqualEG, + self.BrokenEqualEG] + + def except_type(self, eg, type): + match, rest = None, None + try: + try: + raise eg + except* type as e: + match = e + except Exception as e: + rest = e + return match, rest + + def test_catch_some_unhashable_exception_group_subclass(self): + for BadEG in self.bad_types: + with self.subTest(BadEG): + eg = BadEG("eg", + [TypeError(1), + BadEG("nested", [ValueError(2)])]) + + match, rest = self.except_type(eg, TypeError) + self.assertExceptionIsLike(match, BadEG("eg", [TypeError(1)])) + self.assertExceptionIsLike(rest, + BadEG("eg", [BadEG("nested", [ValueError(2)])])) + + def test_catch_none_unhashable_exception_group_subclass(self): + for BadEG in self.bad_types: + with self.subTest(BadEG): + + eg = BadEG("eg", + [TypeError(1), + BadEG("nested", [ValueError(2)])]) + + match, rest = self.except_type(eg, OSError) + self.assertIsNone(match) + self.assertExceptionIsLike(rest, eg) + + def test_catch_all_unhashable_exception_group_subclass(self): + for BadEG in self.bad_types: + with self.subTest(BadEG): + + eg = BadEG("eg", + [TypeError(1), + BadEG("nested", [ValueError(2)])]) + + match, rest = self.except_type(eg, Exception) + self.assertExceptionIsLike(match, eg) + self.assertIsNone(rest) + + def test_reraise_unhashable_eg(self): + for BadEG in self.bad_types: + with self.subTest(BadEG): + + eg = BadEG("eg", + [TypeError(1), ValueError(2), + BadEG("nested", [ValueError(3), OSError(4)])]) + + try: + try: + raise eg + except* ValueError: + pass + except* OSError: + raise + except Exception as e: + exc = e + + self.assertExceptionIsLike( + exc, BadEG("eg", [TypeError(1), + BadEG("nested", [OSError(4)])])) + + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_exception_group.py b/Lib/test/test_exception_group.py index aa28e16bedfa68..b11524e778e665 100644 --- a/Lib/test/test_exception_group.py +++ b/Lib/test/test_exception_group.py @@ -78,16 +78,30 @@ def test_BEG_wraps_BaseException__creates_BEG(self): beg = BaseExceptionGroup("beg", [ValueError(1), KeyboardInterrupt(2)]) self.assertIs(type(beg), BaseExceptionGroup) - def test_EG_subclass_wraps_anything(self): + def test_EG_subclass_wraps_non_base_exceptions(self): class MyEG(ExceptionGroup): pass self.assertIs( type(MyEG("eg", [ValueError(12), TypeError(42)])), MyEG) - self.assertIs( - type(MyEG("eg", [ValueError(12), KeyboardInterrupt(42)])), - MyEG) + + def test_EG_subclass_does_not_wrap_base_exceptions(self): + class MyEG(ExceptionGroup): + pass + + msg = "Cannot nest BaseExceptions in 'MyEG'" + with self.assertRaisesRegex(TypeError, msg): + MyEG("eg", [ValueError(12), KeyboardInterrupt(42)]) + + def test_BEG_and_E_subclass_does_not_wrap_base_exceptions(self): + class MyEG(BaseExceptionGroup, ValueError): + pass + + msg = "Cannot nest BaseExceptions in 'MyEG'" + with self.assertRaisesRegex(TypeError, msg): + MyEG("eg", [ValueError(12), KeyboardInterrupt(42)]) + def test_BEG_subclass_wraps_anything(self): class MyBEG(BaseExceptionGroup): diff --git a/Lib/test/test_frame.py b/Lib/test/test_frame.py index 4b5bb7f94ac469..ed413f105e5b17 100644 --- a/Lib/test/test_frame.py +++ b/Lib/test/test_frame.py @@ -2,11 +2,17 @@ import re import 
sys import textwrap +import threading import types import unittest import weakref +try: + import _testcapi +except ImportError: + _testcapi = None from test import support +from test.support import threading_helper from test.support.script_helper import assert_python_ok @@ -325,6 +331,83 @@ def f(): if old_enabled: gc.enable() + @support.cpython_only + @threading_helper.requires_working_threading() + def test_sneaky_frame_object_teardown(self): + + class SneakyDel: + def __del__(self): + """ + Stash a reference to the entire stack for walking later. + + It may look crazy, but you'd be surprised how common this is + when using a test runner (like pytest). The typical recipe is: + ResourceWarning + -Werror + a custom sys.unraisablehook. + """ + nonlocal sneaky_frame_object + sneaky_frame_object = sys._getframe() + + class SneakyThread(threading.Thread): + """ + A separate thread isn't needed to make this code crash, but it does + make crashes more consistent, since it means sneaky_frame_object is + backed by freed memory after the thread completes! + """ + + def run(self): + """Run SneakyDel.__del__ as this frame is popped.""" + ref = SneakyDel() + + sneaky_frame_object = None + t = SneakyThread() + t.start() + t.join() + # sneaky_frame_object can be anything, really, but it's crucial that + # SneakyThread.run's frame isn't anywhere on the stack while it's being + # torn down: + self.assertIsNotNone(sneaky_frame_object) + while sneaky_frame_object is not None: + self.assertIsNot( + sneaky_frame_object.f_code, SneakyThread.run.__code__ + ) + sneaky_frame_object = sneaky_frame_object.f_back + +@unittest.skipIf(_testcapi is None, 'need _testcapi') +class TestCAPI(unittest.TestCase): + def getframe(self): + return sys._getframe() + + def test_frame_getters(self): + frame = self.getframe() + self.assertEqual(frame.f_locals, _testcapi.frame_getlocals(frame)) + self.assertIs(frame.f_globals, _testcapi.frame_getglobals(frame)) + self.assertIs(frame.f_builtins, _testcapi.frame_getbuiltins(frame)) + self.assertEqual(frame.f_lasti, _testcapi.frame_getlasti(frame)) + + def test_getvar(self): + current_frame = sys._getframe() + x = 1 + self.assertEqual(_testcapi.frame_getvar(current_frame, "x"), 1) + self.assertEqual(_testcapi.frame_getvarstring(current_frame, b"x"), 1) + with self.assertRaises(NameError): + _testcapi.frame_getvar(current_frame, "y") + with self.assertRaises(NameError): + _testcapi.frame_getvarstring(current_frame, b"y") + + # wrong name type + with self.assertRaises(TypeError): + _testcapi.frame_getvar(current_frame, b'x') + with self.assertRaises(TypeError): + _testcapi.frame_getvar(current_frame, 123) + + def getgenframe(self): + yield sys._getframe() + + def test_frame_get_generator(self): + gen = self.getgenframe() + frame = next(gen) + self.assertIs(gen, _testcapi.frame_getgenerator(frame)) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py index bf3a5b0bbccdfb..318f38a6ed5b14 100644 --- a/Lib/test/test_fstring.py +++ b/Lib/test/test_fstring.py @@ -776,7 +776,7 @@ def test_backslashes_in_string_part(self): self.assertEqual(f'2\x203', '2 3') self.assertEqual(f'\x203', ' 3') - with self.assertWarns(DeprecationWarning): # invalid escape sequence + with self.assertWarns(SyntaxWarning): # invalid escape sequence value = eval(r"f'\{6*7}'") self.assertEqual(value, '\\42') self.assertEqual(f'\\{6*7}', '\\42') diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py index 082a90d46baedc..544228e3bab47b 100644 --- 
a/Lib/test/test_ftplib.py +++ b/Lib/test/test_ftplib.py @@ -21,13 +21,11 @@ from test.support import threading_helper from test.support import socket_helper from test.support import warnings_helper +from test.support import asynchat +from test.support import asyncore from test.support.socket_helper import HOST, HOSTv6 -asynchat = warnings_helper.import_deprecated('asynchat') -asyncore = warnings_helper.import_deprecated('asyncore') - - support.requires_working_socket(module=True) TIMEOUT = support.LOOPBACK_TIMEOUT @@ -984,11 +982,11 @@ def test_context(self): ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) ctx.check_hostname = False ctx.verify_mode = ssl.CERT_NONE - self.assertRaises(ValueError, ftplib.FTP_TLS, keyfile=CERTFILE, + self.assertRaises(TypeError, ftplib.FTP_TLS, keyfile=CERTFILE, context=ctx) - self.assertRaises(ValueError, ftplib.FTP_TLS, certfile=CERTFILE, + self.assertRaises(TypeError, ftplib.FTP_TLS, certfile=CERTFILE, context=ctx) - self.assertRaises(ValueError, ftplib.FTP_TLS, certfile=CERTFILE, + self.assertRaises(TypeError, ftplib.FTP_TLS, certfile=CERTFILE, keyfile=CERTFILE, context=ctx) self.client = ftplib.FTP_TLS(context=ctx, timeout=TIMEOUT) diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py index 087f72768fa4bf..db7cb9ace6e5f3 100644 --- a/Lib/test/test_gc.py +++ b/Lib/test/test_gc.py @@ -542,48 +542,6 @@ def __getattr__(self, someattribute): self.assertEqual(gc.collect(), 2) self.assertEqual(len(gc.garbage), garbagelen) - def test_boom_new(self): - # boom__new and boom2_new are exactly like boom and boom2, except use - # new-style classes. - - class Boom_New(object): - def __getattr__(self, someattribute): - del self.attr - raise AttributeError - - a = Boom_New() - b = Boom_New() - a.attr = b - b.attr = a - - gc.collect() - garbagelen = len(gc.garbage) - del a, b - self.assertEqual(gc.collect(), 2) - self.assertEqual(len(gc.garbage), garbagelen) - - def test_boom2_new(self): - class Boom2_New(object): - def __init__(self): - self.x = 0 - - def __getattr__(self, someattribute): - self.x += 1 - if self.x > 1: - del self.attr - raise AttributeError - - a = Boom2_New() - b = Boom2_New() - a.attr = b - b.attr = a - - gc.collect() - garbagelen = len(gc.garbage) - del a, b - self.assertEqual(gc.collect(), 2) - self.assertEqual(len(gc.garbage), garbagelen) - def test_get_referents(self): alist = [1, 3, 5] got = gc.get_referents(alist) diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index 42cc20c4676640..492b77a954d865 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -306,6 +306,26 @@ def gen(): self.assertEqual(next(g), "done") self.assertEqual(sys.exc_info(), (None, None, None)) + def test_nested_gen_except_loop(self): + def gen(): + for i in range(100): + self.assertIsInstance(sys.exception(), TypeError) + yield "doing" + + def outer(): + try: + raise TypeError + except: + for x in gen(): + yield x + + try: + raise ValueError + except Exception: + for x in outer(): + self.assertEqual(x, "doing") + self.assertEqual(sys.exception(), None) + def test_except_throw_exception_context(self): def gen(): try: diff --git a/Lib/test/test_getpath.py b/Lib/test/test_getpath.py index 1e46cb5780c942..bdcf4a37191682 100644 --- a/Lib/test/test_getpath.py +++ b/Lib/test/test_getpath.py @@ -238,6 +238,29 @@ def test_buildtree_pythonhome_win32(self): actual = getpath(ns, expected) self.assertEqual(expected, actual) + def test_no_dlls_win32(self): + "Test a layout on Windows with no DLLs directory." 
+ ns = MockNTNamespace( + argv0=r"C:\Python\python.exe", + real_executable=r"C:\Python\python.exe", + ) + ns.add_known_xfile(r"C:\Python\python.exe") + ns.add_known_file(r"C:\Python\Lib\os.py") + expected = dict( + executable=r"C:\Python\python.exe", + base_executable=r"C:\Python\python.exe", + prefix=r"C:\Python", + exec_prefix=r"C:\Python", + module_search_paths_set=1, + module_search_paths=[ + r"C:\Python\python98.zip", + r"C:\Python\Lib", + r"C:\Python", + ], + ) + actual = getpath(ns, expected) + self.assertEqual(expected, actual) + def test_normal_posix(self): "Test a 'standard' install layout on *nix" ns = MockPosixNamespace( @@ -359,6 +382,70 @@ def test_venv_changed_name_posix(self): actual = getpath(ns, expected) self.assertEqual(expected, actual) + def test_venv_non_installed_zip_path_posix(self): + "Test a venv created from non-installed python has correct zip path.""" + ns = MockPosixNamespace( + argv0="/venv/bin/python", + PREFIX="/usr", + ENV_PATH="/venv/bin:/usr/bin", + ) + ns.add_known_xfile("/path/to/non-installed/bin/python") + ns.add_known_xfile("/venv/bin/python") + ns.add_known_link("/venv/bin/python", + "/path/to/non-installed/bin/python") + ns.add_known_file("/path/to/non-installed/lib/python9.8/os.py") + ns.add_known_dir("/path/to/non-installed/lib/python9.8/lib-dynload") + ns.add_known_file("/venv/pyvenv.cfg", [ + r"home = /path/to/non-installed" + ]) + expected = dict( + executable="/venv/bin/python", + prefix="/path/to/non-installed", + exec_prefix="/path/to/non-installed", + base_executable="/path/to/non-installed/bin/python", + base_prefix="/path/to/non-installed", + base_exec_prefix="/path/to/non-installed", + module_search_paths_set=1, + module_search_paths=[ + "/path/to/non-installed/lib/python98.zip", + "/path/to/non-installed/lib/python9.8", + "/path/to/non-installed/lib/python9.8/lib-dynload", + ], + ) + actual = getpath(ns, expected) + self.assertEqual(expected, actual) + + def test_venv_changed_name_copy_posix(self): + "Test a venv --copies layout on *nix that lacks a distributed 'python'" + ns = MockPosixNamespace( + argv0="python", + PREFIX="/usr", + ENV_PATH="/venv/bin:/usr/bin", + ) + ns.add_known_xfile("/usr/bin/python9") + ns.add_known_xfile("/venv/bin/python") + ns.add_known_file("/usr/lib/python9.8/os.py") + ns.add_known_dir("/usr/lib/python9.8/lib-dynload") + ns.add_known_file("/venv/pyvenv.cfg", [ + r"home = /usr/bin" + ]) + expected = dict( + executable="/venv/bin/python", + prefix="/usr", + exec_prefix="/usr", + base_executable="/usr/bin/python9", + base_prefix="/usr", + base_exec_prefix="/usr", + module_search_paths_set=1, + module_search_paths=[ + "/usr/lib/python98.zip", + "/usr/lib/python9.8", + "/usr/lib/python9.8/lib-dynload", + ], + ) + actual = getpath(ns, expected) + self.assertEqual(expected, actual) + def test_symlink_normal_posix(self): "Test a 'standard' install layout via symlink on *nix" ns = MockPosixNamespace( diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py index b3d94e0a21cb6a..620a5b19109a8c 100644 --- a/Lib/test/test_httplib.py +++ b/Lib/test/test_httplib.py @@ -1978,7 +1978,7 @@ def test_local_unknown_cert(self): self.assertEqual(exc_info.exception.reason, 'CERTIFICATE_VERIFY_FAILED') def test_local_good_hostname(self): - # The (valid) cert validates the HTTP hostname + # The (valid) cert validates the HTTPS hostname import ssl server = self.make_server(CERT_localhost) context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) @@ -1991,7 +1991,7 @@ def test_local_good_hostname(self): 
self.assertEqual(resp.status, 404) def test_local_bad_hostname(self): - # The (valid) cert doesn't validate the HTTP hostname + # The (valid) cert doesn't validate the HTTPS hostname import ssl server = self.make_server(CERT_fakehostname) context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) @@ -1999,38 +1999,21 @@ h = client.HTTPSConnection('localhost', server.port, context=context) with self.assertRaises(ssl.CertificateError): h.request('GET', '/') - # Same with explicit check_hostname=True - with warnings_helper.check_warnings(('', DeprecationWarning)): - h = client.HTTPSConnection('localhost', server.port, - context=context, check_hostname=True) + + # Same with explicit context.check_hostname=True + context.check_hostname = True + h = client.HTTPSConnection('localhost', server.port, context=context) with self.assertRaises(ssl.CertificateError): h.request('GET', '/') - # With check_hostname=False, the mismatching is ignored - context.check_hostname = False - with warnings_helper.check_warnings(('', DeprecationWarning)): - h = client.HTTPSConnection('localhost', server.port, - context=context, check_hostname=False) - h.request('GET', '/nonexistent') - resp = h.getresponse() - resp.close() - h.close() - self.assertEqual(resp.status, 404) - # The context's check_hostname setting is used if one isn't passed to - # HTTPSConnection. + + # With context.check_hostname=False, the mismatching is ignored + context.check_hostname = False h = client.HTTPSConnection('localhost', server.port, context=context) h.request('GET', '/nonexistent') resp = h.getresponse() - self.assertEqual(resp.status, 404) resp.close() h.close() - # Passing check_hostname to HTTPSConnection should override the - # context's setting. - with warnings_helper.check_warnings(('', DeprecationWarning)): - h = client.HTTPSConnection('localhost', server.port, - context=context, check_hostname=True) - with self.assertRaises(ssl.CertificateError): - h.request('GET', '/') + self.assertEqual(resp.status, 404) @unittest.skipIf(not hasattr(client, 'HTTPSConnection'), 'http.client.HTTPSConnection not available') @@ -2066,11 +2049,9 @@ def test_tls13_pha(self): self.assertIs(h._context, context) self.assertFalse(h._context.post_handshake_auth) - with warnings.catch_warnings(): - warnings.filterwarnings('ignore', 'key_file, cert_file and check_hostname are deprecated', - DeprecationWarning) - h = client.HTTPSConnection('localhost', 443, context=context, - cert_file=CERT_localhost) + context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + context.post_handshake_auth = True + h = client.HTTPSConnection('localhost', 443, context=context) self.assertTrue(h._context.post_handshake_auth) diff --git a/Lib/test/test_httpservers.py b/Lib/test/test_httpservers.py index a937258069ed89..ca078862cca6b9 100644 --- a/Lib/test/test_httpservers.py +++ b/Lib/test/test_httpservers.py @@ -26,7 +26,7 @@ import datetime import threading from unittest import mock -from io import BytesIO +from io import BytesIO, StringIO import unittest from test import support @@ -990,6 +990,27 @@ def verify_http_server_response(self, response): match = self.HTTPResponseMatch.search(response) self.assertIsNotNone(match) + def test_unprintable_not_logged(self): + # We call the method from the class directly as our Socketless + # Handler subclass overrode it... nice for everything BUT this test.
+ self.handler.client_address = ('127.0.0.1', 1337) + log_message = BaseHTTPRequestHandler.log_message + with mock.patch.object(sys, 'stderr', StringIO()) as fake_stderr: + log_message(self.handler, '/foo') + log_message(self.handler, '/\033bar\000\033') + log_message(self.handler, '/spam %s.', 'a') + log_message(self.handler, '/spam %s.', '\033\x7f\x9f\xa0beans') + log_message(self.handler, '"GET /foo\\b"ar\007 HTTP/1.0"') + stderr = fake_stderr.getvalue() + self.assertNotIn('\033', stderr) # non-printable chars are caught. + self.assertNotIn('\000', stderr) # non-printable chars are caught. + lines = stderr.splitlines() + self.assertIn('/foo', lines[0]) + self.assertIn(r'/\x1bbar\x00\x1b', lines[1]) + self.assertIn('/spam a.', lines[2]) + self.assertIn('/spam \\x1b\\x7f\\x9f\xa0beans.', lines[3]) + self.assertIn(r'"GET /foo\\b"ar\x07 HTTP/1.0"', lines[4]) + def test_http_1_1(self): result = self.send_typical_request(b'GET / HTTP/1.1\r\n\r\n') self.verify_http_server_response(result[0]) diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py index b554bc0c79960d..7626d9572e1e96 100644 --- a/Lib/test/test_imaplib.py +++ b/Lib/test/test_imaplib.py @@ -573,15 +573,6 @@ def test_ssl_verified(self): ssl_context=ssl_context) client.shutdown() - # Mock the private method _connect(), so mark the test as specific - # to CPython stdlib - @cpython_only - def test_certfile_arg_warn(self): - with warnings_helper.check_warnings(('', DeprecationWarning)): - with mock.patch.object(self.imap_class, 'open'): - with mock.patch.object(self.imap_class, '_connect'): - self.imap_class('localhost', 143, certfile=CERTFILE) - class ThreadedNetworkedTests(unittest.TestCase): server_class = socketserver.TCPServer imap_class = imaplib.IMAP4 @@ -1070,18 +1061,6 @@ def test_logout(self): rs = _server.logout() self.assertEqual(rs[0], 'BYE', rs) - def test_ssl_context_certfile_exclusive(self): - with socket_helper.transient_internet(self.host): - self.assertRaises( - ValueError, self.imap_class, self.host, self.port, - certfile=CERTFILE, ssl_context=self.create_ssl_context()) - - def test_ssl_context_keyfile_exclusive(self): - with socket_helper.transient_internet(self.host): - self.assertRaises( - ValueError, self.imap_class, self.host, self.port, - keyfile=CERTFILE, ssl_context=self.create_ssl_context()) - if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py index 446e913e5bf383..80abc720c3251a 100644 --- a/Lib/test/test_imp.py +++ b/Lib/test/test_imp.py @@ -1,3 +1,4 @@ +import gc import importlib import importlib.util import os @@ -413,6 +414,35 @@ def __init__(self, name): bltin = create_builtin(spec) self.assertEqual(bltin, builtins) + @support.cpython_only + def test_create_builtin_subinterp(self): + # gh-99578: create_builtin() behavior changes after the creation of the + # first sub-interpreter. Test both code paths, before and after the + # creation of a sub-interpreter. Previously, create_builtin() had + # a reference leak after the creation of the first sub-interpreter. 
+ + import builtins + create_builtin = support.get_attribute(_imp, "create_builtin") + class Spec: + name = "builtins" + spec = Spec() + + def check_get_builtins(): + refcnt = sys.getrefcount(builtins) + mod = _imp.create_builtin(spec) + self.assertIs(mod, builtins) + self.assertEqual(sys.getrefcount(builtins), refcnt + 1) + # Check that a GC collection doesn't crash + gc.collect() + + check_get_builtins() + + ret = support.run_in_subinterp("import builtins") + self.assertEqual(ret, 0) + + check_get_builtins() + + class ReloadTests(unittest.TestCase): """Very basic tests to make sure that imp.reload() operates just like diff --git a/Lib/test/test_importlib/extension/test_case_sensitivity.py b/Lib/test/test_importlib/extension/test_case_sensitivity.py index 366e565cf4b7aa..0bb74fff5fcf96 100644 --- a/Lib/test/test_importlib/extension/test_case_sensitivity.py +++ b/Lib/test/test_importlib/extension/test_case_sensitivity.py @@ -8,7 +8,7 @@ machinery = util.import_importlib('importlib.machinery') -@unittest.skipIf(util.EXTENSIONS.filename is None, '_testcapi not available') +@unittest.skipIf(util.EXTENSIONS.filename is None, f'{util.EXTENSIONS.name} not available') @util.case_insensitive_tests class ExtensionModuleCaseSensitivityTest(util.CASEOKTestBase): diff --git a/Lib/test/test_importlib/extension/test_loader.py b/Lib/test/test_importlib/extension/test_loader.py index 8570c6bc90cd07..d69192b56bacb6 100644 --- a/Lib/test/test_importlib/extension/test_loader.py +++ b/Lib/test/test_importlib/extension/test_loader.py @@ -13,9 +13,9 @@ from test.support.script_helper import assert_python_failure -class LoaderTests(abc.LoaderTests): +class LoaderTests: - """Test load_module() for extension modules.""" + """Test ExtensionFileLoader.""" def setUp(self): if not self.machinery.EXTENSION_SUFFIXES: @@ -32,15 +32,6 @@ def load_module(self, fullname): warnings.simplefilter("ignore", DeprecationWarning) return self.loader.load_module(fullname) - def test_load_module_API(self): - # Test the default argument for load_module(). - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - self.loader.load_module() - self.loader.load_module(None) - with self.assertRaises(ImportError): - self.load_module('XXX') - def test_equality(self): other = self.machinery.ExtensionFileLoader(util.EXTENSIONS.name, util.EXTENSIONS.file_path) @@ -51,6 +42,15 @@ def test_inequality(self): util.EXTENSIONS.file_path) self.assertNotEqual(self.loader, other) + def test_load_module_API(self): + # Test the default argument for load_module(). + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + self.loader.load_module() + self.loader.load_module(None) + with self.assertRaises(ImportError): + self.load_module('XXX') + def test_module(self): with util.uncache(util.EXTENSIONS.name): module = self.load_module(util.EXTENSIONS.name) @@ -68,12 +68,6 @@ def test_module(self): # No extension module in a package available for testing. test_lacking_parent = None - def test_module_reuse(self): - with util.uncache(util.EXTENSIONS.name): - module1 = self.load_module(util.EXTENSIONS.name) - module2 = self.load_module(util.EXTENSIONS.name) - self.assertIs(module1, module2) - # No easy way to trigger a failure after a successful import. 
test_state_after_failure = None @@ -83,6 +77,12 @@ def test_unloadable(self): self.load_module(name) self.assertEqual(cm.exception.name, name) + def test_module_reuse(self): + with util.uncache(util.EXTENSIONS.name): + module1 = self.load_module(util.EXTENSIONS.name) + module2 = self.load_module(util.EXTENSIONS.name) + self.assertIs(module1, module2) + def test_is_package(self): self.assertFalse(self.loader.is_package(util.EXTENSIONS.name)) for suffix in self.machinery.EXTENSION_SUFFIXES: @@ -90,10 +90,93 @@ def test_is_package(self): loader = self.machinery.ExtensionFileLoader('pkg', path) self.assertTrue(loader.is_package('pkg')) + (Frozen_LoaderTests, Source_LoaderTests ) = util.test_both(LoaderTests, machinery=machinery) + +class SinglePhaseExtensionModuleTests(abc.LoaderTests): + # Test loading extension modules without multi-phase initialization. + + def setUp(self): + if not self.machinery.EXTENSION_SUFFIXES: + raise unittest.SkipTest("Requires dynamic loading support.") + self.name = '_testsinglephase' + if self.name in sys.builtin_module_names: + raise unittest.SkipTest( + f"{self.name} is a builtin module" + ) + finder = self.machinery.FileFinder(None) + self.spec = importlib.util.find_spec(self.name) + assert self.spec + self.loader = self.machinery.ExtensionFileLoader( + self.name, self.spec.origin) + + def load_module(self): + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + return self.loader.load_module(self.name) + + def load_module_by_name(self, fullname): + # Load a module from the test extension by name. + origin = self.spec.origin + loader = self.machinery.ExtensionFileLoader(fullname, origin) + spec = importlib.util.spec_from_loader(fullname, loader) + module = importlib.util.module_from_spec(spec) + loader.exec_module(module) + return module + + def test_module(self): + # Test loading an extension module. + with util.uncache(self.name): + module = self.load_module() + for attr, value in [('__name__', self.name), + ('__file__', self.spec.origin), + ('__package__', '')]: + self.assertEqual(getattr(module, attr), value) + with self.assertRaises(AttributeError): + module.__path__ + self.assertIs(module, sys.modules[self.name]) + self.assertIsInstance(module.__loader__, + self.machinery.ExtensionFileLoader) + + # No extension module as __init__ available for testing. + test_package = None + + # No extension module in a package available for testing. + test_lacking_parent = None + + # No easy way to trigger a failure after a successful import. + test_state_after_failure = None + + def test_unloadable(self): + name = 'asdfjkl;' + with self.assertRaises(ImportError) as cm: + self.load_module_by_name(name) + self.assertEqual(cm.exception.name, name) + + def test_unloadable_nonascii(self): + # Test behavior with nonexistent module with non-ASCII name. + name = 'fo\xf3' + with self.assertRaises(ImportError) as cm: + self.load_module_by_name(name) + self.assertEqual(cm.exception.name, name) + + # It may make sense to add the equivalent to + # the following MultiPhaseExtensionModuleTests tests: + # + # * test_nonmodule + # * test_nonmodule_with_methods + # * test_bad_modules + # * test_nonascii + + +(Frozen_SinglePhaseExtensionModuleTests, + Source_SinglePhaseExtensionModuleTests + ) = util.test_both(SinglePhaseExtensionModuleTests, machinery=machinery) + + class MultiPhaseExtensionModuleTests(abc.LoaderTests): # Test loading extension modules with multi-phase initialization (PEP 489). 
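For reference, a minimal sketch (not part of the patch) of the loader pattern that the new SinglePhaseExtensionModuleTests above exercise: find_spec, ExtensionFileLoader, spec_from_loader, module_from_spec, then exec_module. It assumes the _testsinglephase test extension has been built and is importable, as in the tests.

import importlib.util
from importlib.machinery import ExtensionFileLoader

name = "_testsinglephase"                  # single-phase init test extension (assumed available)
spec = importlib.util.find_spec(name)      # locate the built .so/.pyd
if spec is not None:
    loader = ExtensionFileLoader(name, spec.origin)
    new_spec = importlib.util.spec_from_loader(name, loader)
    module = importlib.util.module_from_spec(new_spec)
    loader.exec_module(module)             # runs the single-phase init function
    assert module.__file__ == spec.origin  # mirrors the __file__ check in test_module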
diff --git a/Lib/test/test_importlib/test_namespace_pkgs.py b/Lib/test/test_importlib/test_namespace_pkgs.py index f451f7547b35bb..65428c3d3ead9b 100644 --- a/Lib/test/test_importlib/test_namespace_pkgs.py +++ b/Lib/test/test_importlib/test_namespace_pkgs.py @@ -79,6 +79,10 @@ def test_cant_import_other(self): with self.assertRaises(ImportError): import foo.two + def test_simple_repr(self): + import foo.one + assert repr(foo).startswith("<module 'foo' (namespace) from [") + class DynamicPathNamespacePackage(NamespacePackageTest): paths = ['portion1'] diff --git a/Lib/test/test_importlib/util.py b/Lib/test/test_importlib/util.py index 0b6dcc5eaf03d2..9032fd18d3f95b 100644 --- a/Lib/test/test_importlib/util.py +++ b/Lib/test/test_importlib/util.py @@ -27,7 +27,7 @@ EXTENSIONS.ext = None EXTENSIONS.filename = None EXTENSIONS.file_path = None -EXTENSIONS.name = '_testcapi' +EXTENSIONS.name = '_testsinglephase' def _extension_details(): global EXTENSIONS diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py index cfc6e411ea680d..2b7977b1648f70 100644 --- a/Lib/test/test_inspect.py +++ b/Lib/test/test_inspect.py @@ -886,6 +886,12 @@ def test_class(self): self.assertSourceEqual(self.fodderModule.X, 1, 2) +class TestComplexDecorator(GetSourceBase): + fodderModule = mod2 + + def test_parens_in_decorator(self): + self.assertSourceEqual(self.fodderModule.complex_decorated, 273, 275) + class _BrokenDataDescriptor(object): """ A broken data descriptor. See bug #1785. @@ -2960,8 +2966,6 @@ def foo(a): pass self.assertEqual(str(inspect.signature(foo)), '(a)') def test_signature_on_decorated(self): - import functools - def decorator(func): @functools.wraps(func) def wrapper(*args, **kwargs) -> int: @@ -2973,6 +2977,8 @@ class Foo: def bar(self, a, b): pass + bar = decorator(Foo().bar) + self.assertEqual(self.signature(Foo.bar), ((('self', ..., ..., "positional_or_keyword"), ('a', ..., ..., "positional_or_keyword"), @@ -2991,6 +2997,11 @@ def bar(self, a, b): # from "func" to "wrapper", hence no # return_annotation + self.assertEqual(self.signature(bar), + ((('a', ..., ..., "positional_or_keyword"), + ('b', ..., ..., "positional_or_keyword")), + ...)) + # Test that we handle method wrappers correctly def decorator(func): @functools.wraps(func) diff --git a/Lib/test/test_int.py b/Lib/test/test_int.py index f484c59f675f2a..334fea0774be51 100644 --- a/Lib/test/test_int.py +++ b/Lib/test/test_int.py @@ -2,10 +2,16 @@ import time import unittest +from unittest import mock from test import support from test.test_grammar import (VALID_UNDERSCORE_LITERALS, INVALID_UNDERSCORE_LITERALS) +try: + import _pylong +except ImportError: + _pylong = None + L = [ ('0', 0), ('1', 1), @@ -841,6 +847,39 @@ def test_pylong_str_to_int(self): with self.assertRaises(ValueError) as err: int('_' + s) + @support.cpython_only # tests implementation details of CPython. + @unittest.skipUnless(_pylong, "_pylong module required") + @mock.patch.object(_pylong, "int_to_decimal_string") + def test_pylong_misbehavior_error_path_to_str( + self, mock_int_to_str): + with support.adjust_int_max_str_digits(20_000): + big_value = int('7'*19_999) + mock_int_to_str.return_value = None # not a str + with self.assertRaises(TypeError) as ctx: + str(big_value) + self.assertIn('_pylong.int_to_decimal_string did not', + str(ctx.exception)) + mock_int_to_str.side_effect = RuntimeError("testABC") + with self.assertRaises(RuntimeError): + str(big_value) + + @support.cpython_only # tests implementation details of CPython. 
+ @unittest.skipUnless(_pylong, "_pylong module required") + @mock.patch.object(_pylong, "int_from_string") + def test_pylong_misbehavior_error_path_from_str( + self, mock_int_from_str): + big_value = '7'*19_999 + with support.adjust_int_max_str_digits(20_000): + mock_int_from_str.return_value = b'not an int' + with self.assertRaises(TypeError) as ctx: + int(big_value) + self.assertIn('_pylong.int_from_string did not', + str(ctx.exception)) + + mock_int_from_str.side_effect = RuntimeError("test123") + with self.assertRaises(RuntimeError): + int(big_value) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py index 24c93b969ea2b7..c5f2e5060a546d 100644 --- a/Lib/test/test_io.py +++ b/Lib/test/test_io.py @@ -888,6 +888,14 @@ def badopener(fname, flags): open('non-existent', 'r', opener=badopener) self.assertEqual(str(cm.exception), 'opener returned -2') + def test_opener_invalid_fd(self): + # Check that OSError is raised with error code EBADF if the + # opener returns an invalid file descriptor (see gh-82212). + fd = os_helper.make_bad_fd() + with self.assertRaises(OSError) as cm: + self.open('foo', opener=lambda name, flags: fd) + self.assertEqual(cm.exception.errno, errno.EBADF) + def test_fileio_closefd(self): # Issue #4841 with self.open(__file__, 'rb') as f1, \ @@ -1531,11 +1539,25 @@ def test_no_extraneous_read(self): def test_read_on_closed(self): # Issue #23796 - b = io.BufferedReader(io.BytesIO(b"12")) + b = self.BufferedReader(self.BytesIO(b"12")) b.read(1) b.close() - self.assertRaises(ValueError, b.peek) - self.assertRaises(ValueError, b.read1, 1) + with self.subTest('peek'): + self.assertRaises(ValueError, b.peek) + with self.subTest('read1'): + self.assertRaises(ValueError, b.read1, 1) + with self.subTest('read'): + self.assertRaises(ValueError, b.read) + with self.subTest('readinto'): + self.assertRaises(ValueError, b.readinto, bytearray()) + with self.subTest('readinto1'): + self.assertRaises(ValueError, b.readinto1, bytearray()) + with self.subTest('flush'): + self.assertRaises(ValueError, b.flush) + with self.subTest('truncate'): + self.assertRaises(ValueError, b.truncate) + with self.subTest('seek'): + self.assertRaises(ValueError, b.seek, 0) def test_truncate_on_read_only(self): rawio = self.MockFileIO(b"abc") @@ -1593,10 +1615,10 @@ def test_garbage_collection(self): def test_args_error(self): # Issue #17275 with self.assertRaisesRegex(TypeError, "BufferedReader"): - self.tp(io.BytesIO(), 1024, 1024, 1024) + self.tp(self.BytesIO(), 1024, 1024, 1024) def test_bad_readinto_value(self): - rawio = io.BufferedReader(io.BytesIO(b"12")) + rawio = self.tp(self.BytesIO(b"12")) rawio.readinto = lambda buf: -1 bufio = self.tp(rawio) with self.assertRaises(OSError) as cm: @@ -1604,7 +1626,7 @@ def test_bad_readinto_value(self): self.assertIsNone(cm.exception.__cause__) def test_bad_readinto_type(self): - rawio = io.BufferedReader(io.BytesIO(b"12")) + rawio = self.tp(self.BytesIO(b"12")) rawio.readinto = lambda buf: b'' bufio = self.tp(rawio) with self.assertRaises(OSError) as cm: @@ -1747,7 +1769,7 @@ def test_write_non_blocking(self): self.assertTrue(s.startswith(b"01234567A"), s) def test_write_and_rewind(self): - raw = io.BytesIO() + raw = self.BytesIO() bufio = self.tp(raw, 4) self.assertEqual(bufio.write(b"abcdef"), 6) self.assertEqual(bufio.tell(), 6) @@ -1957,7 +1979,7 @@ def test_garbage_collection(self): def test_args_error(self): # Issue #17275 with self.assertRaisesRegex(TypeError, "BufferedWriter"): - self.tp(io.BytesIO(), 
1024, 1024, 1024) + self.tp(self.BytesIO(), 1024, 1024, 1024) class PyBufferedWriterTest(BufferedWriterTest): @@ -2433,7 +2455,7 @@ def test_garbage_collection(self): def test_args_error(self): # Issue #17275 with self.assertRaisesRegex(TypeError, "BufferedRandom"): - self.tp(io.BytesIO(), 1024, 1024, 1024) + self.tp(self.BytesIO(), 1024, 1024, 1024) class PyBufferedRandomTest(BufferedRandomTest): @@ -3465,7 +3487,7 @@ def test_illegal_encoder(self): # encode() is invalid shouldn't cause an assertion failure. rot13 = codecs.lookup("rot13") with support.swap_attr(rot13, '_is_text_encoding', True): - t = io.TextIOWrapper(io.BytesIO(b'foo'), encoding="rot13") + t = self.TextIOWrapper(self.BytesIO(b'foo'), encoding="rot13") self.assertRaises(TypeError, t.write, 'bar') def test_illegal_decoder(self): @@ -3571,7 +3593,7 @@ def seekable(self): return True t = self.TextIOWrapper(F(), encoding='utf-8') def test_reconfigure_locale(self): - wrapper = io.TextIOWrapper(io.BytesIO(b"test")) + wrapper = self.TextIOWrapper(self.BytesIO(b"test")) wrapper.reconfigure(encoding="locale") def test_reconfigure_encoding_read(self): @@ -3741,7 +3763,7 @@ def test_garbage_collection(self): # all data to disk. # The Python version has __del__, so it ends in gc.garbage instead. with warnings_helper.check_warnings(('', ResourceWarning)): - rawio = io.FileIO(os_helper.TESTFN, "wb") + rawio = self.FileIO(os_helper.TESTFN, "wb") b = self.BufferedWriter(rawio) t = self.TextIOWrapper(b, encoding="ascii") t.write("456def") @@ -3923,7 +3945,15 @@ def test_translate(self): self.assertEqual(decoder.decode(b"\r\r\n"), "\r\r\n") class CIncrementalNewlineDecoderTest(IncrementalNewlineDecoderTest): - pass + @support.cpython_only + def test_uninitialized(self): + uninitialized = self.IncrementalNewlineDecoder.__new__( + self.IncrementalNewlineDecoder) + self.assertRaises(ValueError, uninitialized.decode, b'bar') + self.assertRaises(ValueError, uninitialized.getstate) + self.assertRaises(ValueError, uninitialized.setstate, (b'foo', 0)) + self.assertRaises(ValueError, uninitialized.reset) + class PyIncrementalNewlineDecoderTest(IncrementalNewlineDecoderTest): pass diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py index 5c656c49e2e75f..a5388b2e5debd8 100644 --- a/Lib/test/test_ipaddress.py +++ b/Lib/test/test_ipaddress.py @@ -2277,6 +2277,39 @@ def testReservedIpv4(self): self.assertEqual(False, ipaddress.ip_address('128.0.0.0').is_loopback) self.assertEqual(True, ipaddress.ip_network('0.0.0.0').is_unspecified) + def testPrivateNetworks(self): + self.assertEqual(False, ipaddress.ip_network("0.0.0.0/0").is_private) + self.assertEqual(False, ipaddress.ip_network("1.0.0.0/8").is_private) + + self.assertEqual(True, ipaddress.ip_network("0.0.0.0/8").is_private) + self.assertEqual(True, ipaddress.ip_network("10.0.0.0/8").is_private) + self.assertEqual(True, ipaddress.ip_network("127.0.0.0/8").is_private) + self.assertEqual(True, ipaddress.ip_network("169.254.0.0/16").is_private) + self.assertEqual(True, ipaddress.ip_network("172.16.0.0/12").is_private) + self.assertEqual(True, ipaddress.ip_network("192.0.0.0/29").is_private) + self.assertEqual(True, ipaddress.ip_network("192.0.0.170/31").is_private) + self.assertEqual(True, ipaddress.ip_network("192.0.2.0/24").is_private) + self.assertEqual(True, ipaddress.ip_network("192.168.0.0/16").is_private) + self.assertEqual(True, ipaddress.ip_network("198.18.0.0/15").is_private) + self.assertEqual(True, ipaddress.ip_network("198.51.100.0/24").is_private) + self.assertEqual(True, 
ipaddress.ip_network("203.0.113.0/24").is_private) + self.assertEqual(True, ipaddress.ip_network("240.0.0.0/4").is_private) + self.assertEqual(True, ipaddress.ip_network("255.255.255.255/32").is_private) + + self.assertEqual(False, ipaddress.ip_network("::/0").is_private) + self.assertEqual(False, ipaddress.ip_network("::ff/128").is_private) + + self.assertEqual(True, ipaddress.ip_network("::1/128").is_private) + self.assertEqual(True, ipaddress.ip_network("::/128").is_private) + self.assertEqual(True, ipaddress.ip_network("::ffff:0:0/96").is_private) + self.assertEqual(True, ipaddress.ip_network("100::/64").is_private) + self.assertEqual(True, ipaddress.ip_network("2001::/23").is_private) + self.assertEqual(True, ipaddress.ip_network("2001:2::/48").is_private) + self.assertEqual(True, ipaddress.ip_network("2001:db8::/32").is_private) + self.assertEqual(True, ipaddress.ip_network("2001:10::/28").is_private) + self.assertEqual(True, ipaddress.ip_network("fc00::/7").is_private) + self.assertEqual(True, ipaddress.ip_network("fe80::/10").is_private) + def testReservedIpv6(self): self.assertEqual(True, ipaddress.ip_network('ffff::').is_multicast) diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py index a0a740fba8e8e3..b447b6cbab9c22 100644 --- a/Lib/test/test_itertools.py +++ b/Lib/test/test_itertools.py @@ -161,11 +161,11 @@ def test_accumulate(self): def test_batched(self): self.assertEqual(list(batched('ABCDEFG', 3)), - [['A', 'B', 'C'], ['D', 'E', 'F'], ['G']]) + [('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)]) self.assertEqual(list(batched('ABCDEFG', 2)), - [['A', 'B'], ['C', 'D'], ['E', 'F'], ['G']]) + [('A', 'B'), ('C', 'D'), ('E', 'F'), ('G',)]) self.assertEqual(list(batched('ABCDEFG', 1)), - [['A'], ['B'], ['C'], ['D'], ['E'], ['F'], ['G']]) + [('A',), ('B',), ('C',), ('D',), ('E',), ('F',), ('G',)]) with self.assertRaises(TypeError): # Too few arguments list(batched('ABCDEFG')) @@ -188,8 +188,8 @@ def test_batched(self): with self.subTest(s=s, n=n, batches=batches): # Order is preserved and no data is lost self.assertEqual(''.join(chain(*batches)), s) - # Each batch is an exact list - self.assertTrue(all(type(batch) is list for batch in batches)) + # Each batch is an exact tuple + self.assertTrue(all(type(batch) is tuple for batch in batches)) # All but the last batch is of size n if batches: last_batch = batches.pop() @@ -675,6 +675,7 @@ def test_cycle(self): self.assertRaises(TypeError, cycle, 5) self.assertEqual(list(islice(cycle(gen3()),10)), [0,1,2,0,1,2,0,1,2,0]) + def test_cycle_copy_pickle(self): # check copy, deepcopy, pickle c = cycle('abc') self.assertEqual(next(c), 'a') @@ -710,6 +711,37 @@ def test_cycle(self): d = pickle.loads(p) # rebuild the cycle object self.assertEqual(take(20, d), list('cdeabcdeabcdeabcdeab')) + def test_cycle_unpickle_compat(self): + testcases = [ + b'citertools\ncycle\n(c__builtin__\niter\n((lI1\naI2\naI3\natRI1\nbtR((lI1\naI0\ntb.', + b'citertools\ncycle\n(c__builtin__\niter\n(](K\x01K\x02K\x03etRK\x01btR(]K\x01aK\x00tb.', + b'\x80\x02citertools\ncycle\nc__builtin__\niter\n](K\x01K\x02K\x03e\x85RK\x01b\x85R]K\x01aK\x00\x86b.', + b'\x80\x03citertools\ncycle\ncbuiltins\niter\n](K\x01K\x02K\x03e\x85RK\x01b\x85R]K\x01aK\x00\x86b.', + b'\x80\x04\x95=\x00\x00\x00\x00\x00\x00\x00\x8c\titertools\x8c\x05cycle\x93\x8c\x08builtins\x8c\x04iter\x93](K\x01K\x02K\x03e\x85RK\x01b\x85R]K\x01aK\x00\x86b.', + + b'citertools\ncycle\n(c__builtin__\niter\n((lp0\nI1\naI2\naI3\natRI1\nbtR(g0\nI1\ntb.', + 
b'citertools\ncycle\n(c__builtin__\niter\n(]q\x00(K\x01K\x02K\x03etRK\x01btR(h\x00K\x01tb.', + b'\x80\x02citertools\ncycle\nc__builtin__\niter\n]q\x00(K\x01K\x02K\x03e\x85RK\x01b\x85Rh\x00K\x01\x86b.', + b'\x80\x03citertools\ncycle\ncbuiltins\niter\n]q\x00(K\x01K\x02K\x03e\x85RK\x01b\x85Rh\x00K\x01\x86b.', + b'\x80\x04\x95<\x00\x00\x00\x00\x00\x00\x00\x8c\titertools\x8c\x05cycle\x93\x8c\x08builtins\x8c\x04iter\x93]\x94(K\x01K\x02K\x03e\x85RK\x01b\x85Rh\x00K\x01\x86b.', + + b'citertools\ncycle\n(c__builtin__\niter\n((lI1\naI2\naI3\natRI1\nbtR((lI1\naI00\ntb.', + b'citertools\ncycle\n(c__builtin__\niter\n(](K\x01K\x02K\x03etRK\x01btR(]K\x01aI00\ntb.', + b'\x80\x02citertools\ncycle\nc__builtin__\niter\n](K\x01K\x02K\x03e\x85RK\x01b\x85R]K\x01a\x89\x86b.', + b'\x80\x03citertools\ncycle\ncbuiltins\niter\n](K\x01K\x02K\x03e\x85RK\x01b\x85R]K\x01a\x89\x86b.', + b'\x80\x04\x95<\x00\x00\x00\x00\x00\x00\x00\x8c\titertools\x8c\x05cycle\x93\x8c\x08builtins\x8c\x04iter\x93](K\x01K\x02K\x03e\x85RK\x01b\x85R]K\x01a\x89\x86b.', + + b'citertools\ncycle\n(c__builtin__\niter\n((lp0\nI1\naI2\naI3\natRI1\nbtR(g0\nI01\ntb.', + b'citertools\ncycle\n(c__builtin__\niter\n(]q\x00(K\x01K\x02K\x03etRK\x01btR(h\x00I01\ntb.', + b'\x80\x02citertools\ncycle\nc__builtin__\niter\n]q\x00(K\x01K\x02K\x03e\x85RK\x01b\x85Rh\x00\x88\x86b.', + b'\x80\x03citertools\ncycle\ncbuiltins\niter\n]q\x00(K\x01K\x02K\x03e\x85RK\x01b\x85Rh\x00\x88\x86b.', + b'\x80\x04\x95;\x00\x00\x00\x00\x00\x00\x00\x8c\titertools\x8c\x05cycle\x93\x8c\x08builtins\x8c\x04iter\x93]\x94(K\x01K\x02K\x03e\x85RK\x01b\x85Rh\x00\x88\x86b.', + ] + assert len(testcases) == 20 + for t in testcases: + it = pickle.loads(t) + self.assertEqual(take(10, it), [2, 3, 1, 2, 3, 1, 2, 3, 1, 2]) + def test_cycle_setstate(self): # Verify both modes for restoring state @@ -1777,12 +1809,12 @@ class TestPurePythonRoughEquivalents(unittest.TestCase): def test_batched_recipe(self): def batched_recipe(iterable, n): - "Batch data into lists of length n. The last batch may be shorter." + "Batch data into tuples of length n. The last batch may be shorter." 
# batched('ABCDEFG', 3) --> ABC DEF G if n < 1: raise ValueError('n must be at least one') it = iter(iterable) - while (batch := list(islice(it, n))): + while (batch := tuple(islice(it, n))): yield batch for iterable, n in product( @@ -2055,7 +2087,7 @@ def test_accumulate(self): def test_batched(self): s = 'abcde' - r = [['a', 'b'], ['c', 'd'], ['e']] + r = [('a', 'b'), ('c', 'd'), ('e',)] n = 2 for g in (G, I, Ig, L, R): with self.subTest(g=g): diff --git a/Lib/test/test_launcher.py b/Lib/test/test_launcher.py index 6ad85dc9c300e1..47152d4a3c00e6 100644 --- a/Lib/test/test_launcher.py +++ b/Lib/test/test_launcher.py @@ -173,7 +173,7 @@ def find_py(cls): errors="ignore", ) as p: p.stdin.close() - version = next(p.stdout).splitlines()[0].rpartition(" ")[2] + version = next(p.stdout, "\n").splitlines()[0].rpartition(" ")[2] p.stdout.read() p.wait(10) if not sys.version.startswith(version): @@ -467,6 +467,15 @@ def test_py3_default_env(self): self.assertEqual("3.100-arm64", data["SearchInfo.tag"]) self.assertEqual("X.Y-arm64.exe -X fake_arg_for_test -arg", data["stdout"].strip()) + def test_py_default_short_argv0(self): + with self.py_ini(TEST_PY_COMMANDS): + for argv0 in ['"py.exe"', 'py.exe', '"py"', 'py']: + with self.subTest(argv0): + data = self.run_py(["--version"], argv=f'{argv0} --version') + self.assertEqual("PythonTestSuite", data["SearchInfo.company"]) + self.assertEqual("3.100", data["SearchInfo.tag"]) + self.assertEqual(f'X.Y.exe --version', data["stdout"].strip()) + def test_py_default_in_list(self): data = self.run_py(["-0"], env=TEST_PY_ENV) default = None diff --git a/Lib/test/test_list.py b/Lib/test/test_list.py index 9aa6dd1095668b..2969c6e2f98a23 100644 --- a/Lib/test/test_list.py +++ b/Lib/test/test_list.py @@ -103,7 +103,7 @@ def test_list_resize_overflow(self): del lst[1:] self.assertEqual(len(lst), 1) - size = ((2 ** (tuple.__itemsize__ * 8) - 1) // 2) + size = sys.maxsize with self.assertRaises((MemoryError, OverflowError)): lst * size with self.assertRaises((MemoryError, OverflowError)): diff --git a/Lib/test/test_lltrace.py b/Lib/test/test_lltrace.py index 7cf89846f8a727..747666e256700e 100644 --- a/Lib/test/test_lltrace.py +++ b/Lib/test/test_lltrace.py @@ -8,7 +8,7 @@ def example(): x = [] - for i in range(1): + for i in range(0): x.append(i) x = "this is" y = "an example" @@ -75,7 +75,7 @@ def test_lltrace_different_module(self): self.assertIn('this is an example', stdout) # check that offsets match the output of dis.dis() - instr_map = {i.offset: i for i in dis.get_instructions(example)} + instr_map = {i.offset: i for i in dis.get_instructions(example, adaptive=True)} for line in stdout.splitlines(): offset, colon, opname_oparg = line.partition(":") if not colon: diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index d70bfd6b09e13d..072056d3722106 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -47,6 +47,7 @@ from test.support import socket_helper from test.support import threading_helper from test.support import warnings_helper +from test.support import asyncore from test.support.logging_helper import TestHandler import textwrap import threading @@ -64,9 +65,6 @@ with warnings.catch_warnings(): from . 
import smtpd -asyncore = warnings_helper.import_deprecated('asyncore') - - try: import win32evtlog, win32evtlogutil, pywintypes except ImportError: diff --git a/Lib/test/test_lzma.py b/Lib/test/test_lzma.py index 145c8cfced4080..18f474ba2a8bdc 100644 --- a/Lib/test/test_lzma.py +++ b/Lib/test/test_lzma.py @@ -825,10 +825,7 @@ def test_read_0(self): def test_read_10(self): with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: chunks = [] - while True: - result = f.read(10) - if not result: - break + while result := f.read(10): self.assertLessEqual(len(result), 10) chunks.append(result) self.assertEqual(b"".join(chunks), INPUT) @@ -911,10 +908,7 @@ def test_read_bad_data(self): def test_read1(self): with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: blocks = [] - while True: - result = f.read1() - if not result: - break + while result := f.read1(): blocks.append(result) self.assertEqual(b"".join(blocks), INPUT) self.assertEqual(f.read1(), b"") @@ -926,10 +920,7 @@ def test_read1_0(self): def test_read1_10(self): with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: blocks = [] - while True: - result = f.read1(10) - if not result: - break + while result := f.read1(10): blocks.append(result) self.assertEqual(b"".join(blocks), INPUT) self.assertEqual(f.read1(), b"") @@ -937,10 +928,7 @@ def test_read1_10(self): def test_read1_multistream(self): with LZMAFile(BytesIO(COMPRESSED_XZ * 5)) as f: blocks = [] - while True: - result = f.read1() - if not result: - break + while result := f.read1(): blocks.append(result) self.assertEqual(b"".join(blocks), INPUT * 5) self.assertEqual(f.read1(), b"") diff --git a/Lib/test/test_marshal.py b/Lib/test/test_marshal.py index fe4f368bed42f4..3d9d6d5d0aca34 100644 --- a/Lib/test/test_marshal.py +++ b/Lib/test/test_marshal.py @@ -260,6 +260,8 @@ def test_recursion_limit(self): #if os.name == 'nt' and support.Py_DEBUG: if os.name == 'nt': MAX_MARSHAL_STACK_DEPTH = 1000 + elif sys.platform == 'wasi': + MAX_MARSHAL_STACK_DEPTH = 1500 else: MAX_MARSHAL_STACK_DEPTH = 2000 for i in range(MAX_MARSHAL_STACK_DEPTH - 2): @@ -353,7 +355,7 @@ def test_deterministic_sets(self): for elements in ( "float('nan'), b'a', b'b', b'c', 'x', 'y', 'z'", # Also test for bad interactions with backreferencing: - "('Spam', 0), ('Spam', 1), ('Spam', 2)", + "('Spam', 0), ('Spam', 1), ('Spam', 2), ('Spam', 3), ('Spam', 4), ('Spam', 5)", ): s = f"{kind}([{elements}])" with self.subTest(s): diff --git a/Lib/test/test_minidom.py b/Lib/test/test_minidom.py index ef38c362103fc6..2ca3908bd1caac 100644 --- a/Lib/test/test_minidom.py +++ b/Lib/test/test_minidom.py @@ -1163,14 +1163,10 @@ def testEncodings(self): # Verify that character decoding errors raise exceptions instead # of crashing - if pyexpat.version_info >= (2, 4, 5): - self.assertRaises(ExpatError, parseString, - b'<fran\xe7ais></fran\xe7ais>') - self.assertRaises(ExpatError, parseString, - b'<franais>Comment \xe7a va ? Tr\xe8s bien ?</franais>') - else: - self.assertRaises(UnicodeDecodeError, parseString, - b'<fran\xe7ais>Comment \xe7a va ? Tr\xe8s bien ?</fran\xe7ais>') + with self.assertRaises((UnicodeDecodeError, ExpatError)): + parseString( + b'<fran\xe7ais>Comment \xe7a va ? 
Tr\xe8s bien ?</fran\xe7ais>' + ) doc.unlink() @@ -1631,13 +1627,11 @@ def testEmptyXMLNSValue(self): self.confirm(doc2.namespaceURI == xml.dom.EMPTY_NAMESPACE) def testExceptionOnSpacesInXMLNSValue(self): - if pyexpat.version_info >= (2, 4, 5): - context = self.assertRaisesRegex(ExpatError, 'syntax error') - else: - context = self.assertRaisesRegex(ValueError, 'Unsupported syntax') - - with context: - parseString('<element xmlns:abc="http:abc.com/de f g/hi/j k"><abc:foo /></element>') + with self.assertRaises((ValueError, ExpatError)): + parseString( + '<element xmlns:abc="http:abc.com/de f g/hi/j k">' + + '<abc:foo /></element>' + ) def testDocRemoveChild(self): doc = parse(tstfile) diff --git a/Lib/test/test_ntpath.py b/Lib/test/test_ntpath.py index d51946322c8056..336648273b6cf1 100644 --- a/Lib/test/test_ntpath.py +++ b/Lib/test/test_ntpath.py @@ -856,6 +856,23 @@ def test_nt_helpers(self): self.assertIsInstance(b_final_path, bytes) self.assertGreater(len(b_final_path), 0) + @unittest.skipIf(sys.platform != 'win32', "Can only test junctions with creation on win32.") + def test_isjunction(self): + with os_helper.temp_dir() as d: + with os_helper.change_cwd(d): + os.mkdir('tmpdir') + + import _winapi + try: + _winapi.CreateJunction('tmpdir', 'testjunc') + except OSError: + raise unittest.SkipTest('creating the test junction failed') + + self.assertTrue(ntpath.isjunction('testjunc')) + self.assertFalse(ntpath.isjunction('tmpdir')) + self.assertPathEqual(ntpath.realpath('testjunc'), ntpath.realpath('tmpdir')) + + class NtCommonTest(test_genericpath.CommonTest, unittest.TestCase): pathmodule = ntpath attributes = ['relpath'] diff --git a/Lib/test/test_opcache.py b/Lib/test/test_opcache.py index 5c032d59b13f16..e39b7260624899 100644 --- a/Lib/test/test_opcache.py +++ b/Lib/test/test_opcache.py @@ -177,6 +177,73 @@ def f(): for _ in range(1025): self.assertFalse(f()) + def test_load_shadowing_slot_should_raise_type_error(self): + class Class: + __slots__ = ("slot",) + + class Sneaky: + __slots__ = ("shadowed",) + shadowing = Class.slot + + def f(o): + o.shadowing + + o = Sneaky() + o.shadowed = 42 + + for _ in range(1025): + with self.assertRaises(TypeError): + f(o) + + def test_store_shadowing_slot_should_raise_type_error(self): + class Class: + __slots__ = ("slot",) + + class Sneaky: + __slots__ = ("shadowed",) + shadowing = Class.slot + + def f(o): + o.shadowing = 42 + + o = Sneaky() + + for _ in range(1025): + with self.assertRaises(TypeError): + f(o) + + def test_load_borrowed_slot_should_not_crash(self): + class Class: + __slots__ = ("slot",) + + class Sneaky: + borrowed = Class.slot + + def f(o): + o.borrowed + + o = Sneaky() + + for _ in range(1025): + with self.assertRaises(TypeError): + f(o) + + def test_store_borrowed_slot_should_not_crash(self): + class Class: + __slots__ = ("slot",) + + class Sneaky: + borrowed = Class.slot + + def f(o): + o.borrowed = 42 + + o = Sneaky() + + for _ in range(1025): + with self.assertRaises(TypeError): + f(o) + class TestLoadMethodCache(unittest.TestCase): def test_descriptor_added_after_optimization(self): diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py index 10dbb2bd15dfab..e0577916428a08 100644 --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -606,12 +606,13 @@ def test_stat_attributes_bytes(self): def test_stat_result_pickle(self): result = os.stat(self.fname) for proto in range(pickle.HIGHEST_PROTOCOL + 1): - p = pickle.dumps(result, proto) - self.assertIn(b'stat_result', p) - if proto < 4: - self.assertIn(b'cos\nstat_result\n', 
p) - unpickled = pickle.loads(p) - self.assertEqual(result, unpickled) + with self.subTest(f'protocol {proto}'): + p = pickle.dumps(result, proto) + self.assertIn(b'stat_result', p) + if proto < 4: + self.assertIn(b'cos\nstat_result\n', p) + unpickled = pickle.loads(p) + self.assertEqual(result, unpickled) @unittest.skipUnless(hasattr(os, 'statvfs'), 'test needs os.statvfs()') def test_statvfs_attributes(self): @@ -3797,8 +3798,6 @@ class Str(str): else: encoded = os.fsencode(os_helper.TESTFN) self.bytes_filenames.append(encoded) - self.bytes_filenames.append(bytearray(encoded)) - self.bytes_filenames.append(memoryview(encoded)) self.filenames = self.bytes_filenames + self.unicode_filenames @@ -3810,21 +3809,10 @@ def test_oserror_filename(self): (self.filenames, os.rmdir,), (self.filenames, os.stat,), (self.filenames, os.unlink,), + (self.filenames, os.listdir,), + (self.filenames, os.rename, "dst"), + (self.filenames, os.replace, "dst"), ] - if sys.platform == "win32": - funcs.extend(( - (self.bytes_filenames, os.rename, b"dst"), - (self.bytes_filenames, os.replace, b"dst"), - (self.unicode_filenames, os.rename, "dst"), - (self.unicode_filenames, os.replace, "dst"), - (self.unicode_filenames, os.listdir, ), - )) - else: - funcs.extend(( - (self.filenames, os.listdir,), - (self.filenames, os.rename, "dst"), - (self.filenames, os.replace, "dst"), - )) if os_helper.can_chmod(): funcs.append((self.filenames, os.chmod, 0o777)) if hasattr(os, "chown"): @@ -3840,11 +3828,7 @@ def test_oserror_filename(self): if hasattr(os, "chroot"): funcs.append((self.filenames, os.chroot,)) if hasattr(os, "link"): - if sys.platform == "win32": - funcs.append((self.bytes_filenames, os.link, b"dst")) - funcs.append((self.unicode_filenames, os.link, "dst")) - else: - funcs.append((self.filenames, os.link, "dst")) + funcs.append((self.filenames, os.link, "dst")) if hasattr(os, "listxattr"): funcs.extend(( (self.filenames, os.listxattr,), @@ -3857,21 +3841,16 @@ def test_oserror_filename(self): if hasattr(os, "readlink"): funcs.append((self.filenames, os.readlink,)) - for filenames, func, *func_args in funcs: for name in filenames: - if not isinstance(name, (str, bytes)): - with self.assertRaises(TypeError): - func(name, *func_args) + try: + func(name, *func_args) + except OSError as err: + self.assertIs(err.filename, name, str(func)) + except UnicodeDecodeError: + pass else: - try: - func(name, *func_args) - except OSError as err: - self.assertIs(err.filename, name, str(func)) - except UnicodeDecodeError: - pass - else: - self.fail("No exception thrown by {}".format(func)) + self.fail(f"No exception thrown by {func}") class CPUCountTests(unittest.TestCase): def test_cpu_count(self): @@ -4180,6 +4159,8 @@ def check_entry(self, entry, name, is_dir, is_file, is_symlink): self.assertEqual(entry.is_file(follow_symlinks=False), stat.S_ISREG(entry_lstat.st_mode)) + self.assertEqual(entry.is_junction(), os.path.isjunction(entry.path)) + self.assert_stat_equal(entry.stat(), entry_stat, os.name == 'nt' and not is_symlink) @@ -4228,6 +4209,21 @@ def test_attributes(self): entry = entries['symlink_file.txt'] self.check_entry(entry, 'symlink_file.txt', False, True, True) + @unittest.skipIf(sys.platform != 'win32', "Can only test junctions with creation on win32.") + def test_attributes_junctions(self): + dirname = os.path.join(self.path, "tgtdir") + os.mkdir(dirname) + + import _winapi + try: + _winapi.CreateJunction(dirname, os.path.join(self.path, "srcjunc")) + except OSError: + raise unittest.SkipTest('creating the test 
junction failed') + + entries = self.get_entries(['srcjunc', 'tgtdir']) + self.assertEqual(entries['srcjunc'].is_junction(), True) + self.assertEqual(entries['tgtdir'].is_junction(), False) + def get_entry(self, name): path = self.bytes_path if isinstance(name, bytes) else self.path entries = list(os.scandir(path)) @@ -4351,7 +4347,7 @@ def test_bytes_like(self): for cls in bytearray, memoryview: path_bytes = cls(os.fsencode(self.path)) with self.assertRaises(TypeError): - list(os.scandir(path_bytes)) + os.scandir(path_bytes) @unittest.skipUnless(os.listdir in os.supports_fd, 'fd support for listdir required for this test.') diff --git a/Lib/test/test_pathlib.py b/Lib/test/test_pathlib.py index 3b1f302cc964ba..1d01d3cbd91d14 100644 --- a/Lib/test/test_pathlib.py +++ b/Lib/test/test_pathlib.py @@ -1183,10 +1183,6 @@ def test_relative_to(self): self.assertRaises(ValueError, p.relative_to, P('/Foo'), walk_up=True) self.assertRaises(ValueError, p.relative_to, P('C:/Foo'), walk_up=True) p = P('C:/Foo/Bar') - self.assertEqual(p.relative_to(P('c:')), P('/Foo/Bar')) - self.assertEqual(p.relative_to('c:'), P('/Foo/Bar')) - self.assertEqual(str(p.relative_to(P('c:'))), '\\Foo\\Bar') - self.assertEqual(str(p.relative_to('c:')), '\\Foo\\Bar') self.assertEqual(p.relative_to(P('c:/')), P('Foo/Bar')) self.assertEqual(p.relative_to('c:/'), P('Foo/Bar')) self.assertEqual(p.relative_to(P('c:/foO')), P('Bar')) @@ -1194,10 +1190,6 @@ def test_relative_to(self): self.assertEqual(p.relative_to('c:/foO/'), P('Bar')) self.assertEqual(p.relative_to(P('c:/foO/baR')), P()) self.assertEqual(p.relative_to('c:/foO/baR'), P()) - self.assertEqual(p.relative_to(P('c:'), walk_up=True), P('/Foo/Bar')) - self.assertEqual(p.relative_to('c:', walk_up=True), P('/Foo/Bar')) - self.assertEqual(str(p.relative_to(P('c:'), walk_up=True)), '\\Foo\\Bar') - self.assertEqual(str(p.relative_to('c:', walk_up=True)), '\\Foo\\Bar') self.assertEqual(p.relative_to(P('c:/'), walk_up=True), P('Foo/Bar')) self.assertEqual(p.relative_to('c:/', walk_up=True), P('Foo/Bar')) self.assertEqual(p.relative_to(P('c:/foO'), walk_up=True), P('Bar')) @@ -1209,6 +1201,8 @@ def test_relative_to(self): self.assertEqual(p.relative_to('C:/Foo/Bar/Baz', walk_up=True), P('..')) self.assertEqual(p.relative_to('C:/Foo/Baz', walk_up=True), P('../Bar')) # Unrelated paths. 
+ self.assertRaises(ValueError, p.relative_to, 'c:') + self.assertRaises(ValueError, p.relative_to, P('c:')) self.assertRaises(ValueError, p.relative_to, P('C:/Baz')) self.assertRaises(ValueError, p.relative_to, P('C:/Foo/Bar/Baz')) self.assertRaises(ValueError, p.relative_to, P('C:/Foo/Baz')) @@ -1218,6 +1212,8 @@ def test_relative_to(self): self.assertRaises(ValueError, p.relative_to, P('/')) self.assertRaises(ValueError, p.relative_to, P('/Foo')) self.assertRaises(ValueError, p.relative_to, P('//C/Foo')) + self.assertRaises(ValueError, p.relative_to, 'c:', walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('c:'), walk_up=True) self.assertRaises(ValueError, p.relative_to, P('C:Foo'), walk_up=True) self.assertRaises(ValueError, p.relative_to, P('d:'), walk_up=True) self.assertRaises(ValueError, p.relative_to, P('d:/'), walk_up=True) @@ -1275,13 +1271,13 @@ def test_is_relative_to(self): self.assertFalse(p.is_relative_to(P('C:Foo/Bar/Baz'))) self.assertFalse(p.is_relative_to(P('C:Foo/Baz'))) p = P('C:/Foo/Bar') - self.assertTrue(p.is_relative_to('c:')) self.assertTrue(p.is_relative_to(P('c:/'))) self.assertTrue(p.is_relative_to(P('c:/foO'))) self.assertTrue(p.is_relative_to('c:/foO/')) self.assertTrue(p.is_relative_to(P('c:/foO/baR'))) self.assertTrue(p.is_relative_to('c:/foO/baR')) # Unrelated paths. + self.assertFalse(p.is_relative_to('c:')) self.assertFalse(p.is_relative_to(P('C:/Baz'))) self.assertFalse(p.is_relative_to(P('C:/Foo/Bar/Baz'))) self.assertFalse(p.is_relative_to(P('C:/Foo/Baz'))) @@ -2411,6 +2407,13 @@ def test_is_symlink(self): self.assertIs((P / 'linkA\udfff').is_file(), False) self.assertIs((P / 'linkA\x00').is_file(), False) + def test_is_junction(self): + P = self.cls(BASE) + + with mock.patch.object(P._flavour, 'pathmod'): + self.assertEqual(P.is_junction(), P._flavour.pathmod.isjunction.return_value) + P._flavour.pathmod.isjunction.assert_called_once_with(P) + def test_is_fifo_false(self): P = self.cls(BASE) self.assertFalse((P / 'fileA').is_fifo()) diff --git a/Lib/test/test_peepholer.py b/Lib/test/test_peepholer.py index 7363452f5e132f..239c9d03fd9d1f 100644 --- a/Lib/test/test_peepholer.py +++ b/Lib/test/test_peepholer.py @@ -815,15 +815,15 @@ def f(): self.assertInBytecode(f, 'LOAD_FAST_CHECK', "a73") self.assertInBytecode(f, 'LOAD_FAST', "a73") - def test_setting_lineno_adds_check(self): - code = textwrap.dedent("""\ + def test_setting_lineno_no_undefined(self): + code = textwrap.dedent(f"""\ def f(): - x = 2 - L = 3 - L = 4 + x = y = 2 + if not x: + return 4 for i in range(55): x + 6 - del x + L = 7 L = 8 L = 9 L = 10 @@ -832,15 +832,88 @@ def f(): exec(code, ns) f = ns['f'] self.assertInBytecode(f, "LOAD_FAST") + self.assertNotInBytecode(f, "LOAD_FAST_CHECK") + co_code = f.__code__.co_code def trace(frame, event, arg): if event == 'line' and frame.f_lineno == 9: - frame.f_lineno = 2 + frame.f_lineno = 3 sys.settrace(None) return None return trace sys.settrace(trace) - f() - self.assertNotInBytecode(f, "LOAD_FAST") + result = f() + self.assertIsNone(result) + self.assertInBytecode(f, "LOAD_FAST") + self.assertNotInBytecode(f, "LOAD_FAST_CHECK") + self.assertEqual(f.__code__.co_code, co_code) + + def test_setting_lineno_one_undefined(self): + code = textwrap.dedent(f"""\ + def f(): + x = y = 2 + if not x: + return 4 + for i in range(55): + x + 6 + del x + L = 8 + L = 9 + L = 10 + """) + ns = {} + exec(code, ns) + f = ns['f'] + self.assertInBytecode(f, "LOAD_FAST") + self.assertNotInBytecode(f, "LOAD_FAST_CHECK") + co_code = f.__code__.co_code + def 
trace(frame, event, arg): + if event == 'line' and frame.f_lineno == 9: + frame.f_lineno = 3 + sys.settrace(None) + return None + return trace + e = r"assigning None to 1 unbound local" + with self.assertWarnsRegex(RuntimeWarning, e): + sys.settrace(trace) + result = f() + self.assertEqual(result, 4) + self.assertInBytecode(f, "LOAD_FAST") + self.assertNotInBytecode(f, "LOAD_FAST_CHECK") + self.assertEqual(f.__code__.co_code, co_code) + + def test_setting_lineno_two_undefined(self): + code = textwrap.dedent(f"""\ + def f(): + x = y = 2 + if not x: + return 4 + for i in range(55): + x + 6 + del x, y + L = 8 + L = 9 + L = 10 + """) + ns = {} + exec(code, ns) + f = ns['f'] + self.assertInBytecode(f, "LOAD_FAST") + self.assertNotInBytecode(f, "LOAD_FAST_CHECK") + co_code = f.__code__.co_code + def trace(frame, event, arg): + if event == 'line' and frame.f_lineno == 9: + frame.f_lineno = 3 + sys.settrace(None) + return None + return trace + e = r"assigning None to 2 unbound locals" + with self.assertWarnsRegex(RuntimeWarning, e): + sys.settrace(trace) + result = f() + self.assertEqual(result, 4) + self.assertInBytecode(f, "LOAD_FAST") + self.assertNotInBytecode(f, "LOAD_FAST_CHECK") + self.assertEqual(f.__code__.co_code, co_code) def make_function_with_no_checks(self): code = textwrap.dedent("""\ @@ -860,18 +933,22 @@ def f(): self.assertNotInBytecode(f, "LOAD_FAST_CHECK") return f - def test_deleting_local_adds_check(self): + def test_deleting_local_warns_and_assigns_none(self): f = self.make_function_with_no_checks() + co_code = f.__code__.co_code def trace(frame, event, arg): if event == 'line' and frame.f_lineno == 4: del frame.f_locals["x"] sys.settrace(None) return None return trace - sys.settrace(trace) - f() - self.assertNotInBytecode(f, "LOAD_FAST") - self.assertInBytecode(f, "LOAD_FAST_CHECK") + e = r"assigning None to unbound local 'x'" + with self.assertWarnsRegex(RuntimeWarning, e): + sys.settrace(trace) + f() + self.assertInBytecode(f, "LOAD_FAST") + self.assertNotInBytecode(f, "LOAD_FAST_CHECK") + self.assertEqual(f.__code__.co_code, co_code) def test_modifying_local_does_not_add_check(self): f = self.make_function_with_no_checks() @@ -907,7 +984,7 @@ def cfg_optimization_test(self, insts, expected_insts, if expected_consts is None: expected_consts = consts opt_insts, opt_consts = self.get_optimized(insts, consts) - self.compareInstructions(opt_insts, expected_insts) + self.assertInstructionsMatch(opt_insts, expected_insts) self.assertEqual(opt_consts, expected_consts) def test_conditional_jump_forward_non_const_condition(self): diff --git a/Lib/test/test_peg_generator/__init__.py b/Lib/test/test_peg_generator/__init__.py index 77f72fcc7c6e3b..7c402c3d7c5acf 100644 --- a/Lib/test/test_peg_generator/__init__.py +++ b/Lib/test/test_peg_generator/__init__.py @@ -3,6 +3,9 @@ from test import support from test.support import load_package_tests +# TODO: gh-92584: peg_generator uses distutils which was removed in Python 3.12 +raise unittest.SkipTest("distutils has been removed in Python 3.12") + if support.check_sanitizer(address=True, memory=True): # bpo-46633: Skip the test because it is too slow when Python is built diff --git a/Lib/test/test_platform.py b/Lib/test/test_platform.py index 9c03a89fd57d07..3992faf8e5cd5b 100644 --- a/Lib/test/test_platform.py +++ b/Lib/test/test_platform.py @@ -277,6 +277,14 @@ def test_uname_slices(self): self.assertEqual(res[:], expected) self.assertEqual(res[:5], expected[:5]) + def test_uname_fields(self): + self.assertIn('processor', 
platform.uname()._fields) + + def test_uname_asdict(self): + res = platform.uname()._asdict() + self.assertEqual(len(res), 6) + self.assertIn('processor', res) + @unittest.skipIf(sys.platform in ['win32', 'OpenVMS'], "uname -p not used") @support.requires_subprocess() def test_uname_processor(self): diff --git a/Lib/test/test_poplib.py b/Lib/test/test_poplib.py index 5ad9202433dcfb..fa41ba0b6e4637 100644 --- a/Lib/test/test_poplib.py +++ b/Lib/test/test_poplib.py @@ -15,11 +15,8 @@ from test.support import hashlib_helper from test.support import socket_helper from test.support import threading_helper -from test.support import warnings_helper - - -asynchat = warnings_helper.import_deprecated('asynchat') -asyncore = warnings_helper.import_deprecated('asyncore') +from test.support import asynchat +from test.support import asyncore test_support.requires_working_socket(module=True) @@ -425,13 +422,6 @@ def test_context(self): ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) ctx.check_hostname = False ctx.verify_mode = ssl.CERT_NONE - self.assertRaises(ValueError, poplib.POP3_SSL, self.server.host, - self.server.port, keyfile=CERTFILE, context=ctx) - self.assertRaises(ValueError, poplib.POP3_SSL, self.server.host, - self.server.port, certfile=CERTFILE, context=ctx) - self.assertRaises(ValueError, poplib.POP3_SSL, self.server.host, - self.server.port, keyfile=CERTFILE, - certfile=CERTFILE, context=ctx) self.client.quit() self.client = poplib.POP3_SSL(self.server.host, self.server.port, diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py index 442dec8b28065b..77f42f7f9c937b 100644 --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -1646,12 +1646,6 @@ def test_resetids(self): ) support.wait_process(pid, exitcode=0) - def test_resetids_wrong_type(self): - with self.assertRaises(TypeError): - self.spawn_func(sys.executable, - [sys.executable, "-c", "pass"], - os.environ, resetids=None) - def test_setpgroup(self): pid = self.spawn_func( sys.executable, diff --git a/Lib/test/test_posixpath.py b/Lib/test/test_posixpath.py index c644f881e460fe..6c1c0f5577b7ec 100644 --- a/Lib/test/test_posixpath.py +++ b/Lib/test/test_posixpath.py @@ -178,6 +178,8 @@ def test_islink(self): def test_ismount(self): self.assertIs(posixpath.ismount("/"), True) self.assertIs(posixpath.ismount(b"/"), True) + self.assertIs(posixpath.ismount(FakePath("/")), True) + self.assertIs(posixpath.ismount(FakePath(b"/")), True) def test_ismount_non_existent(self): # Non-existent mountpoint. 
@@ -242,6 +244,9 @@ def fake_lstat(path): finally: os.lstat = save_lstat + def test_isjunction(self): + self.assertFalse(posixpath.isjunction(ABSTFN)) + def test_expanduser(self): self.assertEqual(posixpath.expanduser("foo"), "foo") self.assertEqual(posixpath.expanduser(b"foo"), b"foo") diff --git a/Lib/test/test_range.py b/Lib/test/test_range.py index 851ad5b7c2f485..7be76b32ac2935 100644 --- a/Lib/test/test_range.py +++ b/Lib/test/test_range.py @@ -407,11 +407,7 @@ def test_iterator_pickling_overflowing_index(self): for proto in range(pickle.HIGHEST_PROTOCOL + 1): with self.subTest(proto=proto): it = iter(range(2**32 + 2)) - _, _, idx = it.__reduce__() - self.assertEqual(idx, 0) - it.__setstate__(2**32 + 1) # undocumented way to set r->index - _, _, idx = it.__reduce__() - self.assertEqual(idx, 2**32 + 1) + it.__setstate__(2**32 + 1) # undocumented way to advance an iterator d = pickle.dumps(it, proto) it = pickle.loads(d) self.assertEqual(next(it), 2**32 + 1) @@ -442,6 +438,38 @@ def test_large_exhausted_iterator_pickling(self): self.assertEqual(list(i), []) self.assertEqual(list(i2), []) + def test_iterator_unpickle_compat(self): + testcases = [ + b'c__builtin__\niter\n(c__builtin__\nxrange\n(I10\nI20\nI2\ntRtRI2\nb.', + b'c__builtin__\niter\n(c__builtin__\nxrange\n(K\nK\x14K\x02tRtRK\x02b.', + b'\x80\x02c__builtin__\niter\nc__builtin__\nxrange\nK\nK\x14K\x02\x87R\x85RK\x02b.', + b'\x80\x03cbuiltins\niter\ncbuiltins\nrange\nK\nK\x14K\x02\x87R\x85RK\x02b.', + b'\x80\x04\x951\x00\x00\x00\x00\x00\x00\x00\x8c\x08builtins\x8c\x04iter\x93\x8c\x08builtins\x8c\x05range\x93K\nK\x14K\x02\x87R\x85RK\x02b.', + + b'c__builtin__\niter\n(c__builtin__\nxrange\n(L-36893488147419103232L\nI20\nI2\ntRtRL18446744073709551623L\nb.', + b'c__builtin__\niter\n(c__builtin__\nxrange\n(L-36893488147419103232L\nK\x14K\x02tRtRL18446744073709551623L\nb.', + b'\x80\x02c__builtin__\niter\nc__builtin__\nxrange\n\x8a\t\x00\x00\x00\x00\x00\x00\x00\x00\xfeK\x14K\x02\x87R\x85R\x8a\t\x07\x00\x00\x00\x00\x00\x00\x00\x01b.', + b'\x80\x03cbuiltins\niter\ncbuiltins\nrange\n\x8a\t\x00\x00\x00\x00\x00\x00\x00\x00\xfeK\x14K\x02\x87R\x85R\x8a\t\x07\x00\x00\x00\x00\x00\x00\x00\x01b.', + b'\x80\x04\x95C\x00\x00\x00\x00\x00\x00\x00\x8c\x08builtins\x8c\x04iter\x93\x8c\x08builtins\x8c\x05range\x93\x8a\t\x00\x00\x00\x00\x00\x00\x00\x00\xfeK\x14K\x02\x87R\x85R\x8a\t\x07\x00\x00\x00\x00\x00\x00\x00\x01b.', + ] + for t in testcases: + it = pickle.loads(t) + self.assertEqual(list(it), [14, 16, 18]) + + def test_iterator_setstate(self): + it = iter(range(10, 20, 2)) + it.__setstate__(2) + self.assertEqual(list(it), [14, 16, 18]) + it = reversed(range(10, 20, 2)) + it.__setstate__(3) + self.assertEqual(list(it), [12, 10]) + it = iter(range(-2**65, 20, 2)) + it.__setstate__(2**64 + 7) + self.assertEqual(list(it), [14, 16, 18]) + it = reversed(range(10, 2**65, 2)) + it.__setstate__(2**64 - 7) + self.assertEqual(list(it), [12, 10]) + def test_odd_bug(self): # This used to raise a "SystemError: NULL result without error" # because the range validation step was eating the exception diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py index 3f0f84ea8cee6f..11628a236ade9a 100644 --- a/Lib/test/test_re.py +++ b/Lib/test/test_re.py @@ -630,6 +630,11 @@ def test_re_groupref_exists_errors(self): self.checkPatternError(r'()(?(2)a)', "invalid group reference 2", 5) + def test_re_groupref_exists_validation_bug(self): + for i in range(256): + with self.subTest(code=i): + re.compile(r'()(?(1)\x%02x?)' % i) + def test_re_groupref_overflow(self): from 
re._constants import MAXGROUPS self.checkTemplateError('()', r'\g<%s>' % MAXGROUPS, 'xx', diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py index a36d18488a5ef7..baae4efc2ad789 100644 --- a/Lib/test/test_regrtest.py +++ b/Lib/test/test_regrtest.py @@ -28,6 +28,11 @@ ROOT_DIR = os.path.abspath(os.path.normpath(ROOT_DIR)) LOG_PREFIX = r'[0-9]+:[0-9]+:[0-9]+ (?:load avg: [0-9]+\.[0-9]{2} )?' +EXITCODE_BAD_TEST = 2 +EXITCODE_ENV_CHANGED = 3 +EXITCODE_NO_TESTS_RAN = 4 +EXITCODE_INTERRUPTED = 130 + TEST_INTERRUPTED = textwrap.dedent(""" from signal import SIGINT, raise_signal try: @@ -497,7 +502,7 @@ def list_regex(line_format, tests): result.append('INTERRUPTED') if not any((good, result, failed, interrupted, skipped, env_changed, fail_env_changed)): - result.append("NO TEST RUN") + result.append("NO TESTS RAN") elif not result: result.append('SUCCESS') result = ', '.join(result) @@ -707,7 +712,7 @@ def test_failing(self): test_failing = self.create_test('failing', code=code) tests = [test_ok, test_failing] - output = self.run_tests(*tests, exitcode=2) + output = self.run_tests(*tests, exitcode=EXITCODE_BAD_TEST) self.check_executed_tests(output, tests, failed=test_failing) def test_resources(self): @@ -748,13 +753,14 @@ def test_random(self): test = self.create_test('random', code) # first run to get the output with the random seed - output = self.run_tests('-r', test) + output = self.run_tests('-r', test, exitcode=EXITCODE_NO_TESTS_RAN) randseed = self.parse_random_seed(output) match = self.regex_search(r'TESTRANDOM: ([0-9]+)', output) test_random = int(match.group(1)) # try to reproduce with the random seed - output = self.run_tests('-r', '--randseed=%s' % randseed, test) + output = self.run_tests('-r', '--randseed=%s' % randseed, test, + exitcode=EXITCODE_NO_TESTS_RAN) randseed2 = self.parse_random_seed(output) self.assertEqual(randseed2, randseed) @@ -813,7 +819,7 @@ def test_fromfile(self): def test_interrupted(self): code = TEST_INTERRUPTED test = self.create_test('sigint', code=code) - output = self.run_tests(test, exitcode=130) + output = self.run_tests(test, exitcode=EXITCODE_INTERRUPTED) self.check_executed_tests(output, test, omitted=test, interrupted=True) @@ -838,7 +844,7 @@ def test_slowest_interrupted(self): args = ("--slowest", "-j2", test) else: args = ("--slowest", test) - output = self.run_tests(*args, exitcode=130) + output = self.run_tests(*args, exitcode=EXITCODE_INTERRUPTED) self.check_executed_tests(output, test, omitted=test, interrupted=True) @@ -878,7 +884,7 @@ def test_run(self): builtins.__dict__['RUN'] = 1 """) test = self.create_test('forever', code=code) - output = self.run_tests('--forever', test, exitcode=2) + output = self.run_tests('--forever', test, exitcode=EXITCODE_BAD_TEST) self.check_executed_tests(output, [test]*3, failed=test) def check_leak(self, code, what): @@ -887,7 +893,7 @@ def check_leak(self, code, what): filename = 'reflog.txt' self.addCleanup(os_helper.unlink, filename) output = self.run_tests('--huntrleaks', '3:3:', test, - exitcode=2, + exitcode=EXITCODE_BAD_TEST, stderr=subprocess.STDOUT) self.check_executed_tests(output, [test], failed=test) @@ -969,7 +975,7 @@ def test_crashed(self): crash_test = self.create_test(name="crash", code=code) tests = [crash_test] - output = self.run_tests("-j2", *tests, exitcode=2) + output = self.run_tests("-j2", *tests, exitcode=EXITCODE_BAD_TEST) self.check_executed_tests(output, tests, failed=crash_test, randomize=True) @@ -1069,7 +1075,8 @@ def test_env_changed(self): 
self.check_executed_tests(output, [testname], env_changed=testname) # fail with --fail-env-changed - output = self.run_tests("--fail-env-changed", testname, exitcode=3) + output = self.run_tests("--fail-env-changed", testname, + exitcode=EXITCODE_ENV_CHANGED) self.check_executed_tests(output, [testname], env_changed=testname, fail_env_changed=True) @@ -1088,7 +1095,7 @@ def test_fail_always(self): """) testname = self.create_test(code=code) - output = self.run_tests("-w", testname, exitcode=2) + output = self.run_tests("-w", testname, exitcode=EXITCODE_BAD_TEST) self.check_executed_tests(output, [testname], failed=testname, rerun={testname: "test_fail_always"}) @@ -1123,7 +1130,8 @@ def test_bug(self): """) testname = self.create_test(code=code) - output = self.run_tests(testname, "-m", "nosuchtest", exitcode=0) + output = self.run_tests(testname, "-m", "nosuchtest", + exitcode=EXITCODE_NO_TESTS_RAN) self.check_executed_tests(output, [testname], no_test_ran=testname) def test_no_tests_ran_skip(self): @@ -1136,7 +1144,7 @@ def test_skipped(self): """) testname = self.create_test(code=code) - output = self.run_tests(testname, exitcode=0) + output = self.run_tests(testname) self.check_executed_tests(output, [testname]) def test_no_tests_ran_multiple_tests_nonexistent(self): @@ -1150,7 +1158,8 @@ def test_bug(self): testname = self.create_test(code=code) testname2 = self.create_test(code=code) - output = self.run_tests(testname, testname2, "-m", "nosuchtest", exitcode=0) + output = self.run_tests(testname, testname2, "-m", "nosuchtest", + exitcode=EXITCODE_NO_TESTS_RAN) self.check_executed_tests(output, [testname, testname2], no_test_ran=[testname, testname2]) @@ -1198,7 +1207,8 @@ def test_garbage(self): """) testname = self.create_test(code=code) - output = self.run_tests("--fail-env-changed", testname, exitcode=3) + output = self.run_tests("--fail-env-changed", testname, + exitcode=EXITCODE_ENV_CHANGED) self.check_executed_tests(output, [testname], env_changed=[testname], fail_env_changed=True) @@ -1224,7 +1234,8 @@ def test_sleep(self): """) testname = self.create_test(code=code) - output = self.run_tests("-j2", "--timeout=1.0", testname, exitcode=2) + output = self.run_tests("-j2", "--timeout=1.0", testname, + exitcode=EXITCODE_BAD_TEST) self.check_executed_tests(output, [testname], failed=testname) self.assertRegex(output, @@ -1256,7 +1267,8 @@ def test_unraisable_exc(self): """) testname = self.create_test(code=code) - output = self.run_tests("--fail-env-changed", "-v", testname, exitcode=3) + output = self.run_tests("--fail-env-changed", "-v", testname, + exitcode=EXITCODE_ENV_CHANGED) self.check_executed_tests(output, [testname], env_changed=[testname], fail_env_changed=True) @@ -1287,7 +1299,8 @@ def test_threading_excepthook(self): """) testname = self.create_test(code=code) - output = self.run_tests("--fail-env-changed", "-v", testname, exitcode=3) + output = self.run_tests("--fail-env-changed", "-v", testname, + exitcode=EXITCODE_ENV_CHANGED) self.check_executed_tests(output, [testname], env_changed=[testname], fail_env_changed=True) @@ -1328,7 +1341,7 @@ def test_print_warning(self): for option in ("-v", "-W"): with self.subTest(option=option): cmd = ["--fail-env-changed", option, testname] - output = self.run_tests(*cmd, exitcode=3) + output = self.run_tests(*cmd, exitcode=EXITCODE_ENV_CHANGED) self.check_executed_tests(output, [testname], env_changed=[testname], fail_env_changed=True) @@ -1373,7 +1386,8 @@ def test_leak_tmp_file(self): """) testnames = 
[self.create_test(code=code) for _ in range(3)] - output = self.run_tests("--fail-env-changed", "-v", "-j2", *testnames, exitcode=3) + output = self.run_tests("--fail-env-changed", "-v", "-j2", *testnames, + exitcode=EXITCODE_ENV_CHANGED) self.check_executed_tests(output, testnames, env_changed=testnames, fail_env_changed=True, diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py index 6789fe4cc72e3a..8fe62216ecdca0 100644 --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -752,18 +752,25 @@ def _copy(src, dst): @os_helper.skip_unless_symlink def test_copytree_dangling_symlinks(self): - # a dangling symlink raises an error at the end src_dir = self.mkdtemp() + valid_file = os.path.join(src_dir, 'test.txt') + write_file(valid_file, 'abc') + dir_a = os.path.join(src_dir, 'dir_a') + os.mkdir(dir_a) + for d in src_dir, dir_a: + os.symlink('IDONTEXIST', os.path.join(d, 'broken')) + os.symlink(valid_file, os.path.join(d, 'valid')) + + # A dangling symlink should raise an error. dst_dir = os.path.join(self.mkdtemp(), 'destination') - os.symlink('IDONTEXIST', os.path.join(src_dir, 'test.txt')) - os.mkdir(os.path.join(src_dir, 'test_dir')) - write_file((src_dir, 'test_dir', 'test.txt'), '456') self.assertRaises(Error, shutil.copytree, src_dir, dst_dir) - # a dangling symlink is ignored with the proper flag + # Dangling symlinks should be ignored with the proper flag. dst_dir = os.path.join(self.mkdtemp(), 'destination2') shutil.copytree(src_dir, dst_dir, ignore_dangling_symlinks=True) - self.assertNotIn('test.txt', os.listdir(dst_dir)) + for root, dirs, files in os.walk(dst_dir): + self.assertNotIn('broken', files) + self.assertIn('valid', files) # a dangling symlink is copied if symlinks=True dst_dir = os.path.join(self.mkdtemp(), 'destination3') diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py index 3210ef217703e5..b6d5b8c3d82580 100644 --- a/Lib/test/test_smtplib.py +++ b/Lib/test/test_smtplib.py @@ -21,13 +21,11 @@ from test.support import hashlib_helper from test.support import socket_helper from test.support import threading_helper -from test.support import warnings_helper +from test.support import asyncore from unittest.mock import Mock from . 
import smtpd -asyncore = warnings_helper.import_deprecated('asyncore') - support.requires_working_socket(module=True) diff --git a/Lib/test/test_source_encoding.py b/Lib/test/test_source_encoding.py index cfc4b13f18f330..b05173ad00d442 100644 --- a/Lib/test/test_source_encoding.py +++ b/Lib/test/test_source_encoding.py @@ -160,6 +160,18 @@ def test_file_parse_error_multiline(self): finally: os.unlink(TESTFN) + def test_tokenizer_fstring_warning_in_first_line(self): + source = "0b1and 2" + with open(TESTFN, "w") as fd: + fd.write("{}".format(source)) + try: + retcode, stdout, stderr = script_helper.assert_python_ok(TESTFN) + self.assertIn(b"SyntaxWarning: invalid binary litera", stderr) + self.assertEqual(stderr.count(source.encode()), 1) + finally: + os.unlink(TESTFN) + + class AbstractSourceEncodingTest: def test_default_coding(self): diff --git a/Lib/test/test_sqlite3/test_transactions.py b/Lib/test/test_sqlite3/test_transactions.py index 9c3d19e79bd989..5d211dd47b0b6b 100644 --- a/Lib/test/test_sqlite3/test_transactions.py +++ b/Lib/test/test_sqlite3/test_transactions.py @@ -22,9 +22,11 @@ import unittest import sqlite3 as sqlite +from contextlib import contextmanager from test.support import LOOPBACK_TIMEOUT from test.support.os_helper import TESTFN, unlink +from test.support.script_helper import assert_python_ok from test.test_sqlite3.test_dbapi import memory_database @@ -366,5 +368,176 @@ def test_isolation_level_none(self): self.assertEqual(self.traced, [self.QUERY]) +class AutocommitAttribute(unittest.TestCase): + """Test PEP 249-compliant autocommit behaviour.""" + legacy = sqlite.LEGACY_TRANSACTION_CONTROL + + @contextmanager + def check_stmt_trace(self, cx, expected, reset=True): + try: + traced = [] + cx.set_trace_callback(lambda stmt: traced.append(stmt)) + yield + finally: + self.assertEqual(traced, expected) + if reset: + cx.set_trace_callback(None) + + def test_autocommit_default(self): + with memory_database() as cx: + self.assertEqual(cx.autocommit, + sqlite.LEGACY_TRANSACTION_CONTROL) + + def test_autocommit_setget(self): + dataset = ( + True, + False, + sqlite.LEGACY_TRANSACTION_CONTROL, + ) + for mode in dataset: + with self.subTest(mode=mode): + with memory_database(autocommit=mode) as cx: + self.assertEqual(cx.autocommit, mode) + with memory_database() as cx: + cx.autocommit = mode + self.assertEqual(cx.autocommit, mode) + + def test_autocommit_setget_invalid(self): + msg = "autocommit must be True, False, or.*LEGACY" + for mode in "a", 12, (), None: + with self.subTest(mode=mode): + with self.assertRaisesRegex(ValueError, msg): + sqlite.connect(":memory:", autocommit=mode) + + def test_autocommit_disabled(self): + expected = [ + "SELECT 1", + "COMMIT", + "BEGIN", + "ROLLBACK", + "BEGIN", + ] + with memory_database(autocommit=False) as cx: + self.assertTrue(cx.in_transaction) + with self.check_stmt_trace(cx, expected): + cx.execute("SELECT 1") + cx.commit() + cx.rollback() + + def test_autocommit_disabled_implicit_rollback(self): + expected = ["ROLLBACK"] + with memory_database(autocommit=False) as cx: + self.assertTrue(cx.in_transaction) + with self.check_stmt_trace(cx, expected, reset=False): + cx.close() + + def test_autocommit_enabled(self): + expected = ["CREATE TABLE t(t)", "INSERT INTO t VALUES(1)"] + with memory_database(autocommit=True) as cx: + self.assertFalse(cx.in_transaction) + with self.check_stmt_trace(cx, expected): + cx.execute("CREATE TABLE t(t)") + cx.execute("INSERT INTO t VALUES(1)") + self.assertFalse(cx.in_transaction) + + def 
test_autocommit_enabled_txn_ctl(self): + for op in "commit", "rollback": + with self.subTest(op=op): + with memory_database(autocommit=True) as cx: + meth = getattr(cx, op) + self.assertFalse(cx.in_transaction) + with self.check_stmt_trace(cx, []): + meth() # expect this to pass silently + self.assertFalse(cx.in_transaction) + + def test_autocommit_disabled_then_enabled(self): + expected = ["COMMIT"] + with memory_database(autocommit=False) as cx: + self.assertTrue(cx.in_transaction) + with self.check_stmt_trace(cx, expected): + cx.autocommit = True # should commit + self.assertFalse(cx.in_transaction) + + def test_autocommit_enabled_then_disabled(self): + expected = ["BEGIN"] + with memory_database(autocommit=True) as cx: + self.assertFalse(cx.in_transaction) + with self.check_stmt_trace(cx, expected): + cx.autocommit = False # should begin + self.assertTrue(cx.in_transaction) + + def test_autocommit_explicit_then_disabled(self): + expected = ["BEGIN DEFERRED"] + with memory_database(autocommit=True) as cx: + self.assertFalse(cx.in_transaction) + with self.check_stmt_trace(cx, expected): + cx.execute("BEGIN DEFERRED") + cx.autocommit = False # should now be a no-op + self.assertTrue(cx.in_transaction) + + def test_autocommit_enabled_ctx_mgr(self): + with memory_database(autocommit=True) as cx: + # The context manager is a no-op if autocommit=True + with self.check_stmt_trace(cx, []): + with cx: + self.assertFalse(cx.in_transaction) + self.assertFalse(cx.in_transaction) + + def test_autocommit_disabled_ctx_mgr(self): + expected = ["COMMIT", "BEGIN"] + with memory_database(autocommit=False) as cx: + with self.check_stmt_trace(cx, expected): + with cx: + self.assertTrue(cx.in_transaction) + self.assertTrue(cx.in_transaction) + + def test_autocommit_compat_ctx_mgr(self): + expected = ["BEGIN ", "INSERT INTO T VALUES(1)", "COMMIT"] + with memory_database(autocommit=self.legacy) as cx: + cx.execute("create table t(t)") + with self.check_stmt_trace(cx, expected): + with cx: + self.assertFalse(cx.in_transaction) + cx.execute("INSERT INTO T VALUES(1)") + self.assertTrue(cx.in_transaction) + self.assertFalse(cx.in_transaction) + + def test_autocommit_enabled_executescript(self): + expected = ["BEGIN", "SELECT 1"] + with memory_database(autocommit=True) as cx: + with self.check_stmt_trace(cx, expected): + self.assertFalse(cx.in_transaction) + cx.execute("BEGIN") + cx.executescript("SELECT 1") + self.assertTrue(cx.in_transaction) + + def test_autocommit_disabled_executescript(self): + expected = ["SELECT 1"] + with memory_database(autocommit=False) as cx: + with self.check_stmt_trace(cx, expected): + self.assertTrue(cx.in_transaction) + cx.executescript("SELECT 1") + self.assertTrue(cx.in_transaction) + + def test_autocommit_compat_executescript(self): + expected = ["BEGIN", "COMMIT", "SELECT 1"] + with memory_database(autocommit=self.legacy) as cx: + with self.check_stmt_trace(cx, expected): + self.assertFalse(cx.in_transaction) + cx.execute("BEGIN") + cx.executescript("SELECT 1") + self.assertFalse(cx.in_transaction) + + def test_autocommit_disabled_implicit_shutdown(self): + # The implicit ROLLBACK should not call back into Python during + # interpreter tear-down. 
+ code = """if 1: + import sqlite3 + cx = sqlite3.connect(":memory:", autocommit=False) + cx.set_trace_callback(print) + """ + assert_python_ok("-c", code, PYTHONIOENCODING="utf-8") + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_sqlite3/test_types.py b/Lib/test/test_sqlite3/test_types.py index 62318823510d40..5e0ff353cbbd6b 100644 --- a/Lib/test/test_sqlite3/test_types.py +++ b/Lib/test/test_sqlite3/test_types.py @@ -106,9 +106,9 @@ def test_string_with_surrogates(self): @unittest.skipUnless(sys.maxsize > 2**32, 'requires 64bit platform') @support.bigmemtest(size=2**31, memuse=4, dry_run=False) def test_too_large_string(self, maxsize): - with self.assertRaises(sqlite.InterfaceError): + with self.assertRaises(sqlite.DataError): self.cur.execute("insert into test(s) values (?)", ('x'*(2**31-1),)) - with self.assertRaises(OverflowError): + with self.assertRaises(sqlite.DataError): self.cur.execute("insert into test(s) values (?)", ('x'*(2**31),)) self.cur.execute("select 1 from test") row = self.cur.fetchone() @@ -117,9 +117,9 @@ def test_too_large_string(self, maxsize): @unittest.skipUnless(sys.maxsize > 2**32, 'requires 64bit platform') @support.bigmemtest(size=2**31, memuse=3, dry_run=False) def test_too_large_blob(self, maxsize): - with self.assertRaises(sqlite.InterfaceError): + with self.assertRaises(sqlite.DataError): self.cur.execute("insert into test(s) values (?)", (b'x'*(2**31-1),)) - with self.assertRaises(OverflowError): + with self.assertRaises(sqlite.DataError): self.cur.execute("insert into test(s) values (?)", (b'x'*(2**31),)) self.cur.execute("select 1 from test") row = self.cur.fetchone() diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py index 5007e08f321b5a..e926fc5e88e584 100644 --- a/Lib/test/test_ssl.py +++ b/Lib/test/test_ssl.py @@ -9,6 +9,7 @@ from test.support import socket_helper from test.support import threading_helper from test.support import warnings_helper +from test.support import asyncore import socket import select import time @@ -30,9 +31,6 @@ ctypes = None -asyncore = warnings_helper.import_deprecated('asyncore') - - ssl = import_helper.import_module("ssl") import _ssl diff --git a/Lib/test/test_statistics.py b/Lib/test/test_statistics.py index 05ce79f126590a..31a3cb6b53a6f2 100644 --- a/Lib/test/test_statistics.py +++ b/Lib/test/test_statistics.py @@ -3003,14 +3003,19 @@ def __init__(self, mu, sigma): nd = NormalDist(100, 15) self.assertNotEqual(nd, lnd) - def test_pickle_and_copy(self): + def test_copy(self): nd = self.module.NormalDist(37.5, 5.625) nd1 = copy.copy(nd) self.assertEqual(nd, nd1) nd2 = copy.deepcopy(nd) self.assertEqual(nd, nd2) - nd3 = pickle.loads(pickle.dumps(nd)) - self.assertEqual(nd, nd3) + + def test_pickle(self): + nd = self.module.NormalDist(37.5, 5.625) + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + with self.subTest(proto=proto): + pickled = pickle.loads(pickle.dumps(nd, protocol=proto)) + self.assertEqual(nd, pickled) def test_hashability(self): ND = self.module.NormalDist diff --git a/Lib/test/test_string_literals.py b/Lib/test/test_string_literals.py index 7247b7e48bc2b6..9b663c00223d1b 100644 --- a/Lib/test/test_string_literals.py +++ b/Lib/test/test_string_literals.py @@ -109,11 +109,11 @@ def test_eval_str_invalid_escape(self): for b in range(1, 128): if b in b"""\n\r"'01234567NU\\abfnrtuvx""": continue - with self.assertWarns(DeprecationWarning): + with self.assertWarns(SyntaxWarning): self.assertEqual(eval(r"'\%c'" % b), '\\' + chr(b)) with warnings.catch_warnings(record=True) as 
w: - warnings.simplefilter('always', category=DeprecationWarning) + warnings.simplefilter('always', category=SyntaxWarning) eval("'''\n\\z'''") self.assertEqual(len(w), 1) self.assertEqual(str(w[0].message), r"invalid escape sequence '\z'") @@ -121,7 +121,7 @@ def test_eval_str_invalid_escape(self): self.assertEqual(w[0].lineno, 1) with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('error', category=DeprecationWarning) + warnings.simplefilter('error', category=SyntaxWarning) with self.assertRaises(SyntaxError) as cm: eval("'''\n\\z'''") exc = cm.exception @@ -133,11 +133,11 @@ def test_eval_str_invalid_escape(self): def test_eval_str_invalid_octal_escape(self): for i in range(0o400, 0o1000): - with self.assertWarns(DeprecationWarning): + with self.assertWarns(SyntaxWarning): self.assertEqual(eval(r"'\%o'" % i), chr(i)) with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', category=DeprecationWarning) + warnings.simplefilter('always', category=SyntaxWarning) eval("'''\n\\407'''") self.assertEqual(len(w), 1) self.assertEqual(str(w[0].message), @@ -146,7 +146,7 @@ def test_eval_str_invalid_octal_escape(self): self.assertEqual(w[0].lineno, 1) with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('error', category=DeprecationWarning) + warnings.simplefilter('error', category=SyntaxWarning) with self.assertRaises(SyntaxError) as cm: eval("'''\n\\407'''") exc = cm.exception @@ -186,11 +186,11 @@ def test_eval_bytes_invalid_escape(self): for b in range(1, 128): if b in b"""\n\r"'01234567\\abfnrtvx""": continue - with self.assertWarns(DeprecationWarning): + with self.assertWarns(SyntaxWarning): self.assertEqual(eval(r"b'\%c'" % b), b'\\' + bytes([b])) with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', category=DeprecationWarning) + warnings.simplefilter('always', category=SyntaxWarning) eval("b'''\n\\z'''") self.assertEqual(len(w), 1) self.assertEqual(str(w[0].message), r"invalid escape sequence '\z'") @@ -198,7 +198,7 @@ def test_eval_bytes_invalid_escape(self): self.assertEqual(w[0].lineno, 1) with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('error', category=DeprecationWarning) + warnings.simplefilter('error', category=SyntaxWarning) with self.assertRaises(SyntaxError) as cm: eval("b'''\n\\z'''") exc = cm.exception @@ -209,11 +209,11 @@ def test_eval_bytes_invalid_escape(self): def test_eval_bytes_invalid_octal_escape(self): for i in range(0o400, 0o1000): - with self.assertWarns(DeprecationWarning): + with self.assertWarns(SyntaxWarning): self.assertEqual(eval(r"b'\%o'" % i), bytes([i & 0o377])) with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', category=DeprecationWarning) + warnings.simplefilter('always', category=SyntaxWarning) eval("b'''\n\\407'''") self.assertEqual(len(w), 1) self.assertEqual(str(w[0].message), @@ -222,7 +222,7 @@ def test_eval_bytes_invalid_octal_escape(self): self.assertEqual(w[0].lineno, 1) with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('error', category=DeprecationWarning) + warnings.simplefilter('error', category=SyntaxWarning) with self.assertRaises(SyntaxError) as cm: eval("b'''\n\\407'''") exc = cm.exception diff --git a/Lib/test/test_struct.py b/Lib/test/test_struct.py index b0f11af1a7892e..6b1f22f66fd157 100644 --- a/Lib/test/test_struct.py +++ b/Lib/test/test_struct.py @@ -723,23 +723,56 @@ def test_issue35714(self): struct.calcsize(s) @support.cpython_only - def 
test_issue45034_unsigned(self): - _testcapi = import_helper.import_module('_testcapi') - error_msg = f'ushort format requires 0 <= number <= {_testcapi.USHRT_MAX}' - with self.assertRaisesRegex(struct.error, error_msg): - struct.pack('H', 70000) # too large - with self.assertRaisesRegex(struct.error, error_msg): - struct.pack('H', -1) # too small + def test_issue98248(self): + def test_error_msg(prefix, int_type, is_unsigned): + fmt_str = prefix + int_type + size = struct.calcsize(fmt_str) + if is_unsigned: + max_ = 2 ** (size * 8) - 1 + min_ = 0 + else: + max_ = 2 ** (size * 8 - 1) - 1 + min_ = -2 ** (size * 8 - 1) + error_msg = f"'{int_type}' format requires {min_} <= number <= {max_}" + for number in [int(-1e50), min_ - 1, max_ + 1, int(1e50)]: + with self.subTest(format_str=fmt_str, number=number): + with self.assertRaisesRegex(struct.error, error_msg): + struct.pack(fmt_str, number) + error_msg = "required argument is not an integer" + not_number = "" + with self.subTest(format_str=fmt_str, number=not_number): + with self.assertRaisesRegex(struct.error, error_msg): + struct.pack(fmt_str, not_number) + + for prefix in '@=<>': + for int_type in 'BHILQ': + test_error_msg(prefix, int_type, True) + for int_type in 'bhilq': + test_error_msg(prefix, int_type, False) + + int_type = 'N' + test_error_msg('@', int_type, True) + + int_type = 'n' + test_error_msg('@', int_type, False) @support.cpython_only - def test_issue45034_signed(self): - _testcapi = import_helper.import_module('_testcapi') - error_msg = f'short format requires {_testcapi.SHRT_MIN} <= number <= {_testcapi.SHRT_MAX}' - with self.assertRaisesRegex(struct.error, error_msg): - struct.pack('h', 70000) # too large - with self.assertRaisesRegex(struct.error, error_msg): - struct.pack('h', -70000) # too small - + def test_issue98248_error_propagation(self): + class Div0: + def __index__(self): + 1 / 0 + + def test_error_propagation(fmt_str): + with self.subTest(format_str=fmt_str, exception="ZeroDivisionError"): + with self.assertRaises(ZeroDivisionError): + struct.pack(fmt_str, Div0()) + + for prefix in '@=<>': + for int_type in 'BHILQbhilq': + test_error_propagation(prefix + int_type) + + test_error_propagation('N') + test_error_propagation('n') class UnpackIteratorTest(unittest.TestCase): """ diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py index 424a4a93b6f972..abd0dd8b25699b 100644 --- a/Lib/test/test_subprocess.py +++ b/Lib/test/test_subprocess.py @@ -2832,7 +2832,7 @@ def test_close_fds(self): @unittest.skipIf(sys.platform.startswith("freebsd") and os.stat("/dev").st_dev == os.stat("/dev/fd").st_dev, - "Requires fdescfs mounted on /dev/fd on FreeBSD.") + "Requires fdescfs mounted on /dev/fd on FreeBSD") def test_close_fds_when_max_fd_is_lowered(self): """Confirm that issue21618 is fixed (may fail under valgrind).""" fd_status = support.findfile("fd_status.py", subdir="subprocessdata") @@ -3209,7 +3209,7 @@ def __int__(self): 1, 2, 3, 4, True, True, 0, None, None, None, -1, - None, "no vfork") + None, True) self.assertIn('fds_to_keep', str(c.exception)) finally: if not gc_enabled: diff --git a/Lib/test/test_sundry.py b/Lib/test/test_sundry.py index de2e7305ccce24..f4a8d434ed1b8c 100644 --- a/Lib/test/test_sundry.py +++ b/Lib/test/test_sundry.py @@ -18,29 +18,6 @@ def test_untested_modules_can_be_imported(self): self.fail('{} has tests even though test_sundry claims ' 'otherwise'.format(name)) - import distutils.bcppcompiler - import distutils.ccompiler - import distutils.cygwinccompiler - import 
distutils.filelist - import distutils.text_file - import distutils.unixccompiler - - import distutils.command.bdist_dumb - import distutils.command.bdist - import distutils.command.bdist_rpm - import distutils.command.build_clib - import distutils.command.build_ext - import distutils.command.build - import distutils.command.clean - import distutils.command.config - import distutils.command.install_data - import distutils.command.install_egg_info - import distutils.command.install_headers - import distutils.command.install_lib - import distutils.command.register - import distutils.command.sdist - import distutils.command.upload - import html.entities try: diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index 69a86231e2f276..cb284195d976ff 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -756,6 +756,27 @@ >>> __debug__: int Traceback (most recent call last): SyntaxError: cannot assign to __debug__ +>>> f(a=) +Traceback (most recent call last): +SyntaxError: expected argument value expression +>>> f(a, b, c=) +Traceback (most recent call last): +SyntaxError: expected argument value expression +>>> f(a, b, c=, d) +Traceback (most recent call last): +SyntaxError: expected argument value expression +>>> f(*args=[0]) +Traceback (most recent call last): +SyntaxError: cannot assign to iterable argument unpacking +>>> f(a, b, *args=[0]) +Traceback (most recent call last): +SyntaxError: cannot assign to iterable argument unpacking +>>> f(**kwargs={'a': 1}) +Traceback (most recent call last): +SyntaxError: cannot assign to keyword argument unpacking +>>> f(a, b, *args, **kwargs={'a': 1}) +Traceback (most recent call last): +SyntaxError: cannot assign to keyword argument unpacking More set_context(): @@ -1584,6 +1605,22 @@ Traceback (most recent call last): SyntaxError: trailing comma not allowed without surrounding parentheses +>>> import a from b +Traceback (most recent call last): +SyntaxError: Did you mean to use 'from ... import ...' instead? + +>>> import a.y.z from b.y.z +Traceback (most recent call last): +SyntaxError: Did you mean to use 'from ... import ...' instead? + +>>> import a from b as bar +Traceback (most recent call last): +SyntaxError: Did you mean to use 'from ... import ...' instead? + +>>> import a.y.z from b.y.z as bar +Traceback (most recent call last): +SyntaxError: Did you mean to use 'from ... import ...' instead? + # Check that we dont raise the "trailing comma" error if there is more # input to the left of the valid part that we parsed. 
@@ -1998,6 +2035,16 @@ def test_generator_in_function_call(self): "Generator expression must be parenthesized", lineno=1, end_lineno=1, offset=11, end_offset=53) + def test_except_then_except_star(self): + self._check_error("try: pass\nexcept ValueError: pass\nexcept* TypeError: pass", + r"cannot have both 'except' and 'except\*' on the same 'try'", + lineno=3, end_lineno=3, offset=1, end_offset=8) + + def test_except_star_then_except(self): + self._check_error("try: pass\nexcept* ValueError: pass\nexcept TypeError: pass", + r"cannot have both 'except' and 'except\*' on the same 'try'", + lineno=3, end_lineno=3, offset=1, end_offset=7) + def test_empty_line_after_linecont(self): # See issue-40847 s = r"""\ @@ -2098,6 +2145,22 @@ def test_error_parenthesis(self): for paren in ")]}": self._check_error(paren + "1 + 2", f"unmatched '\\{paren}'") + # Some more complex examples: + code = """\ +func( + a=["unclosed], # Need a quote in this comment: " + b=2, +) +""" + self._check_error(code, "parenthesis '\\)' does not match opening parenthesis '\\['") + + def test_error_string_literal(self): + + self._check_error("'blech", "unterminated string literal") + self._check_error('"blech', "unterminated string literal") + self._check_error("'''blech", "unterminated triple-quoted string literal") + self._check_error('"""blech', "unterminated triple-quoted string literal") + def test_invisible_characters(self): self._check_error('print\x17("Hello")', "invalid non-printable character") diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py index 9184e9a42f1941..841bdbda3768be 100644 --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -1422,7 +1422,7 @@ class C(object): pass def func(): return sys._getframe() x = func() - check(x, size('3Pi3c7P2ic??2P')) + check(x, size('3Pi3c7P2ic??2PP')) # function def func(): pass check(func, size('14Pi')) @@ -1439,7 +1439,7 @@ def bar(cls): check(bar, size('PP')) # generator def get_gen(): yield 1 - check(get_gen(), size('P2P4P4c7P2ic??P')) + check(get_gen(), size('P2P4P4c7P2ic??2PPP')) # iterator check(iter('abc'), size('lP')) # callable-iterator @@ -1484,7 +1484,8 @@ def delx(self): del self.__x # PyCapsule # XXX # rangeiterator - check(iter(range(1)), size('4l')) + check(iter(range(1)), size('3l')) + check(iter(range(2**65)), size('3P')) # reverse check(reversed(''), size('nP')) # range diff --git a/Lib/test/test_sys_settrace.py b/Lib/test/test_sys_settrace.py index a448f80449ca3d..e5841a69147793 100644 --- a/Lib/test/test_sys_settrace.py +++ b/Lib/test/test_sys_settrace.py @@ -346,7 +346,7 @@ def make_tracer(): return Tracer() def compare_events(self, line_offset, events, expected_events): - events = [(l - line_offset, e) for (l, e) in events] + events = [(l - line_offset if l is not None else None, e) for (l, e) in events] if events != expected_events: self.fail( "events did not match expectation:\n" + @@ -2703,6 +2703,7 @@ def test_jump_extended_args_unpack_ex_simple(output): _, *_, _ = output.append(2) or "Spam" output.append(3) + @unittest.skip ("NEED TO FIX THIS FOR 3-OPARGS") @jump_test(3, 4, [1, 4, 4, 5]) def test_jump_extended_args_unpack_ex_tricky(output): output.append(1) @@ -2711,6 +2712,7 @@ def test_jump_extended_args_unpack_ex_tricky(output): ) = output.append(4) or "Spam" output.append(5) + @unittest.skip ("NEED TO FIX THIS FOR 3-OPARGS") def test_jump_extended_args_for_iter(self): # In addition to failing when extended arg handling is broken, this can # also hang for a *very* long time: diff --git a/Lib/test/test_syslog.py 
b/Lib/test/test_syslog.py index 2125ec58d87e03..54db80fa9df1af 100644 --- a/Lib/test/test_syslog.py +++ b/Lib/test/test_syslog.py @@ -5,6 +5,7 @@ import threading import time import unittest +from textwrap import dedent # XXX(nnorwitz): This test sucks. I don't know of a platform independent way # to verify that the messages were really logged. @@ -78,6 +79,69 @@ def logger(): finally: sys.setswitchinterval(orig_si) + def test_subinterpreter_syslog(self): + # syslog.syslog() is not allowed in subinterpreters, but only if + # syslog.openlog() hasn't been called in the main interpreter yet. + with self.subTest('before openlog()'): + code = dedent(''' + import syslog + caught_error = False + try: + syslog.syslog('foo') + except RuntimeError: + caught_error = True + assert(caught_error) + ''') + res = support.run_in_subinterp(code) + self.assertEqual(res, 0) + + syslog.openlog() + try: + with self.subTest('after openlog()'): + code = dedent(''' + import syslog + syslog.syslog('foo') + ''') + res = support.run_in_subinterp(code) + self.assertEqual(res, 0) + finally: + syslog.closelog() + + def test_subinterpreter_openlog(self): + try: + code = dedent(''' + import syslog + caught_error = False + try: + syslog.openlog() + except RuntimeError: + caught_error = True + + assert(caught_error) + ''') + res = support.run_in_subinterp(code) + self.assertEqual(res, 0) + finally: + syslog.closelog() + + def test_subinterpreter_closelog(self): + syslog.openlog('python') + try: + code = dedent(''' + import syslog + caught_error = False + try: + syslog.closelog() + except RuntimeError: + caught_error = True + + assert(caught_error) + ''') + res = support.run_in_subinterp(code) + self.assertEqual(res, 0) + finally: + syslog.closelog() + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py index 0868d5d6e90915..213932069201b9 100644 --- a/Lib/test/test_tarfile.py +++ b/Lib/test/test_tarfile.py @@ -734,6 +734,18 @@ def test_zlib_error_does_not_leak(self): with self.assertRaises(tarfile.ReadError): tarfile.open(self.tarname) + def test_next_on_empty_tarfile(self): + fd = io.BytesIO() + tf = tarfile.open(fileobj=fd, mode="w") + tf.close() + + fd.seek(0) + with tarfile.open(fileobj=fd, mode="r|") as tf: + self.assertEqual(tf.next(), None) + + fd.seek(0) + with tarfile.open(fileobj=fd, mode="r") as tf: + self.assertEqual(tf.next(), None) class MiscReadTest(MiscReadTestBase, unittest.TestCase): test_fail_comp = None diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py index 47f2c06685bcaa..63c2501cfe2338 100644 --- a/Lib/test/test_tokenize.py +++ b/Lib/test/test_tokenize.py @@ -10,6 +10,8 @@ from unittest import TestCase, mock from test.test_grammar import (VALID_UNDERSCORE_LITERALS, INVALID_UNDERSCORE_LITERALS) +from test.support import os_helper +from test.support.script_helper import run_test_script, make_script import os import token @@ -2631,5 +2633,19 @@ def fib(n): self.assertEqual(get_tokens(code), get_tokens(code_no_cont)) +class CTokenizerBufferTests(unittest.TestCase): + def test_newline_at_the_end_of_buffer(self): + # See issue 99581: Make sure that if we need to add a new line at the + # end of the buffer, we have enough space in the buffer, specially when + # the current line is as long as the buffer space available. 
+ test_script = f"""\ + #coding: latin-1 + #{"a"*10000} + #{"a"*10002}""" + with os_helper.temp_dir() as temp_dir: + file_name = make_script(temp_dir, 'foo', test_script) + run_test_script(file_name) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_trace.py b/Lib/test/test_trace.py index 5f712111ca14e0..fad2b3b8379ffc 100644 --- a/Lib/test/test_trace.py +++ b/Lib/test/test_trace.py @@ -1,4 +1,5 @@ import os +from pickle import dump import sys from test.support import captured_stdout from test.support.os_helper import (TESTFN, rmtree, unlink) @@ -412,6 +413,15 @@ def test_issue9936(self): self.assertIn(modname, coverage) self.assertEqual(coverage[modname], (5, 100)) + def test_coverageresults_update(self): + # Update empty CoverageResults with a non-empty infile. + infile = TESTFN + '-infile' + with open(infile, 'wb') as f: + dump(({}, {}, {'caller': 1}), f, protocol=1) + self.addCleanup(unlink, infile) + results = trace.CoverageResults({}, {}, infile, {}) + self.assertEqual(results.callers, {'caller': 1}) + ### Tests that don't mess with sys.settrace and can be traced ### themselves TODO: Skip tests that do mess with sys.settrace when ### regrtest is invoked with -T option. diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py index 149d0234fe8a72..95b1bae4f60850 100644 --- a/Lib/test/test_traceback.py +++ b/Lib/test/test_traceback.py @@ -559,6 +559,23 @@ def f_with_binary_operator(): result_lines = self.get_exception(f_with_binary_operator) self.assertEqual(result_lines, expected_error.splitlines()) + def test_caret_for_binary_operators_with_unicode(self): + def f_with_binary_operator(): + áóí = 20 + return 10 + áóí / 0 + 30 + + lineno_f = f_with_binary_operator.__code__.co_firstlineno + expected_error = ( + 'Traceback (most recent call last):\n' + f' File "{__file__}", line {self.callable_line}, in get_exception\n' + ' callable()\n' + f' File "{__file__}", line {lineno_f+2}, in f_with_binary_operator\n' + ' return 10 + áóí / 0 + 30\n' + ' ~~~~^~~\n' + ) + result_lines = self.get_exception(f_with_binary_operator) + self.assertEqual(result_lines, expected_error.splitlines()) + def test_caret_for_binary_operators_two_char(self): def f_with_binary_operator(): divisor = 20 @@ -593,6 +610,23 @@ def f_with_subscript(): result_lines = self.get_exception(f_with_subscript) self.assertEqual(result_lines, expected_error.splitlines()) + def test_caret_for_subscript_unicode(self): + def f_with_subscript(): + some_dict = {'ó': {'á': {'í': {'theta': 1}}}} + return some_dict['ó']['á']['í']['beta'] + + lineno_f = f_with_subscript.__code__.co_firstlineno + expected_error = ( + 'Traceback (most recent call last):\n' + f' File "{__file__}", line {self.callable_line}, in get_exception\n' + ' callable()\n' + f' File "{__file__}", line {lineno_f+2}, in f_with_subscript\n' + " return some_dict['ó']['á']['í']['beta']\n" + ' ~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^\n' + ) + result_lines = self.get_exception(f_with_subscript) + self.assertEqual(result_lines, expected_error.splitlines()) + def test_traceback_specialization_with_syntax_error(self): bytecode = compile("1 / 0 / 1 / 2\n", TESTFN, "exec") @@ -2944,9 +2978,9 @@ class MyClass: for name in ("b", "v", "m", "py"): with self.subTest(name=name): actual = self.get_suggestion(MyClass, name) - self.assertNotIn("you mean", actual) - self.assertNotIn("vvv", actual) - self.assertNotIn("mom", actual) + self.assertNotIn("Did you mean", actual) + self.assertNotIn("'vvv", actual) + self.assertNotIn("'mom'", actual) self.assertNotIn("'id'", 
actual) self.assertNotIn("'w'", actual) self.assertNotIn("'pytho'", actual) @@ -3134,9 +3168,9 @@ def test_import_from_error_bad_suggestions_do_not_trigger_for_small_names(self): for name in ("b", "v", "m", "py"): with self.subTest(name=name): actual = self.get_import_from_suggestion(code, name) - self.assertNotIn("you mean", actual) - self.assertNotIn("vvv", actual) - self.assertNotIn("mom", actual) + self.assertNotIn("Did you mean", actual) + self.assertNotIn("'vvv'", actual) + self.assertNotIn("'mom'", actual) self.assertNotIn("'id'", actual) self.assertNotIn("'w'", actual) self.assertNotIn("'pytho'", actual) @@ -3357,6 +3391,31 @@ def func(): actual = self.get_suggestion(func) self.assertNotIn("blech", actual) + def test_name_error_with_instance(self): + class A: + def __init__(self): + self.blech = None + def foo(self): + blich = 1 + x = blech + + instance = A() + actual = self.get_suggestion(instance.foo) + self.assertIn("self.blech", actual) + + def test_unbound_local_error_with_instance(self): + class A: + def __init__(self): + self.blech = None + def foo(self): + blich = 1 + x = blech + blech = 1 + + instance = A() + actual = self.get_suggestion(instance.foo) + self.assertNotIn("self.blech", actual) + def test_unbound_local_error_does_not_match(self): def func(): something = 3 diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py index f8168bd19f4e82..1cae1b0de7140f 100644 --- a/Lib/test/test_typing.py +++ b/Lib/test/test_typing.py @@ -772,20 +772,42 @@ class C(Generic[*Ts]): pass ('generic[*Ts]', '[*Ts]', 'generic[*Ts]'), ('generic[*Ts]', '[T, *Ts]', 'generic[T, *Ts]'), ('generic[*Ts]', '[*Ts, T]', 'generic[*Ts, T]'), + ('generic[T, *Ts]', '[()]', 'TypeError'), ('generic[T, *Ts]', '[int]', 'generic[int]'), ('generic[T, *Ts]', '[int, str]', 'generic[int, str]'), ('generic[T, *Ts]', '[int, str, bool]', 'generic[int, str, bool]'), + ('generic[list[T], *Ts]', '[()]', 'TypeError'), ('generic[list[T], *Ts]', '[int]', 'generic[list[int]]'), ('generic[list[T], *Ts]', '[int, str]', 'generic[list[int], str]'), ('generic[list[T], *Ts]', '[int, str, bool]', 'generic[list[int], str, bool]'), + ('generic[*Ts, T]', '[()]', 'TypeError'), ('generic[*Ts, T]', '[int]', 'generic[int]'), ('generic[*Ts, T]', '[int, str]', 'generic[int, str]'), ('generic[*Ts, T]', '[int, str, bool]', 'generic[int, str, bool]'), + ('generic[*Ts, list[T]]', '[()]', 'TypeError'), ('generic[*Ts, list[T]]', '[int]', 'generic[list[int]]'), ('generic[*Ts, list[T]]', '[int, str]', 'generic[int, list[str]]'), ('generic[*Ts, list[T]]', '[int, str, bool]', 'generic[int, str, list[bool]]'), + ('generic[T1, T2, *Ts]', '[()]', 'TypeError'), + ('generic[T1, T2, *Ts]', '[int]', 'TypeError'), + ('generic[T1, T2, *Ts]', '[int, str]', 'generic[int, str]'), + ('generic[T1, T2, *Ts]', '[int, str, bool]', 'generic[int, str, bool]'), + ('generic[T1, T2, *Ts]', '[int, str, bool, bytes]', 'generic[int, str, bool, bytes]'), + + ('generic[*Ts, T1, T2]', '[()]', 'TypeError'), + ('generic[*Ts, T1, T2]', '[int]', 'TypeError'), + ('generic[*Ts, T1, T2]', '[int, str]', 'generic[int, str]'), + ('generic[*Ts, T1, T2]', '[int, str, bool]', 'generic[int, str, bool]'), + ('generic[*Ts, T1, T2]', '[int, str, bool, bytes]', 'generic[int, str, bool, bytes]'), + + ('generic[T1, *Ts, T2]', '[()]', 'TypeError'), + ('generic[T1, *Ts, T2]', '[int]', 'TypeError'), + ('generic[T1, *Ts, T2]', '[int, str]', 'generic[int, str]'), + ('generic[T1, *Ts, T2]', '[int, str, bool]', 'generic[int, str, bool]'), + ('generic[T1, *Ts, T2]', '[int, str, bool, bytes]', 
'generic[int, str, bool, bytes]'), + ('generic[T, *Ts]', '[*tuple_type[int, ...]]', 'generic[int, *tuple_type[int, ...]]'), ('generic[T, *Ts]', '[str, *tuple_type[int, ...]]', 'generic[str, *tuple_type[int, ...]]'), ('generic[T, *Ts]', '[*tuple_type[int, ...], str]', 'generic[int, *tuple_type[int, ...], str]'), @@ -2900,7 +2922,7 @@ class CP(P[int]): self.assertEqual(x.bar, 'abc') self.assertEqual(x.x, 1) self.assertEqual(x.__dict__, {'foo': 42, 'bar': 'abc'}) - s = pickle.dumps(P) + s = pickle.dumps(P, proto) D = pickle.loads(s) class E: @@ -7241,6 +7263,65 @@ class X(Generic[P, P2]): self.assertEqual(G1.__args__, ((int, str), (bytes,))) self.assertEqual(G2.__args__, ((int,), (str, bytes))) + def test_typevartuple_and_paramspecs_in_user_generics(self): + Ts = TypeVarTuple("Ts") + P = ParamSpec("P") + + class X(Generic[*Ts, P]): + f: Callable[P, int] + g: Tuple[*Ts] + + G1 = X[int, [bytes]] + self.assertEqual(G1.__args__, (int, (bytes,))) + G2 = X[int, str, [bytes]] + self.assertEqual(G2.__args__, (int, str, (bytes,))) + G3 = X[[bytes]] + self.assertEqual(G3.__args__, ((bytes,),)) + G4 = X[[]] + self.assertEqual(G4.__args__, ((),)) + with self.assertRaises(TypeError): + X[()] + + class Y(Generic[P, *Ts]): + f: Callable[P, int] + g: Tuple[*Ts] + + G1 = Y[[bytes], int] + self.assertEqual(G1.__args__, ((bytes,), int)) + G2 = Y[[bytes], int, str] + self.assertEqual(G2.__args__, ((bytes,), int, str)) + G3 = Y[[bytes]] + self.assertEqual(G3.__args__, ((bytes,),)) + G4 = Y[[]] + self.assertEqual(G4.__args__, ((),)) + with self.assertRaises(TypeError): + Y[()] + + def test_typevartuple_and_paramspecs_in_generic_aliases(self): + P = ParamSpec('P') + T = TypeVar('T') + Ts = TypeVarTuple('Ts') + + for C in Callable, collections.abc.Callable: + with self.subTest(generic=C): + A = C[P, Tuple[*Ts]] + B = A[[int, str], bytes, float] + self.assertEqual(B.__args__, (int, str, Tuple[bytes, float])) + + class X(Generic[T, P]): + pass + + A = X[Tuple[*Ts], P] + B = A[bytes, float, [int, str]] + self.assertEqual(B.__args__, (Tuple[bytes, float], (int, str,))) + + class Y(Generic[P, T]): + pass + + A = Y[P, Tuple[*Ts]] + B = A[[int, str], bytes, float] + self.assertEqual(B.__args__, ((int, str,), Tuple[bytes, float])) + def test_var_substitution(self): T = TypeVar("T") P = ParamSpec("P") @@ -7638,6 +7719,7 @@ class CustomerModel: "eq_default": True, "order_default": False, "kw_only_default": True, + "frozen_default": False, "field_specifiers": (), "kwargs": {}, } @@ -7668,6 +7750,7 @@ class CustomerModel(Decorated, frozen=True): "eq_default": True, "order_default": True, "kw_only_default": False, + "frozen_default": False, "field_specifiers": (), "kwargs": {"make_everything_awesome": True}, } @@ -7684,7 +7767,7 @@ def __new__( return super().__new__(cls, name, bases, namespace) Decorated = dataclass_transform( - order_default=True, field_specifiers=(Field,) + order_default=True, frozen_default=True, field_specifiers=(Field,) )(ModelMeta) class ModelBase(metaclass=Decorated): ... 
@@ -7699,6 +7782,7 @@ class CustomerModel(ModelBase, init=False): "eq_default": True, "order_default": True, "kw_only_default": False, + "frozen_default": True, "field_specifiers": (Field,), "kwargs": {}, } diff --git a/Lib/test/test_unary.py b/Lib/test/test_unary.py index c3c17cc9f611dd..a45fbf6bd6bc54 100644 --- a/Lib/test/test_unary.py +++ b/Lib/test/test_unary.py @@ -8,7 +8,6 @@ def test_negative(self): self.assertTrue(-2 == 0 - 2) self.assertEqual(-0, 0) self.assertEqual(--2, 2) - self.assertTrue(-2 == 0 - 2) self.assertTrue(-2.0 == 0 - 2.0) self.assertTrue(-2j == 0 - 2j) @@ -16,15 +15,13 @@ def test_positive(self): self.assertEqual(+2, 2) self.assertEqual(+0, 0) self.assertEqual(++2, 2) - self.assertEqual(+2, 2) self.assertEqual(+2.0, 2.0) self.assertEqual(+2j, 2j) def test_invert(self): - self.assertTrue(-2 == 0 - 2) - self.assertEqual(-0, 0) - self.assertEqual(--2, 2) - self.assertTrue(-2 == 0 - 2) + self.assertTrue(~2 == -(2+1)) + self.assertEqual(~0, -1) + self.assertEqual(~~2, 2) def test_no_overflow(self): nines = "9" * 32 diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py index 7230591d2c8281..5465d35534f111 100644 --- a/Lib/test/test_unicode.py +++ b/Lib/test/test_unicode.py @@ -16,7 +16,6 @@ import unicodedata import unittest import warnings -from test.support import import_helper from test.support import warnings_helper from test import support, string_tests from test.support.script_helper import assert_python_failure @@ -2380,12 +2379,7 @@ class s1: def __repr__(self): return '\\n' - class s2: - def __repr__(self): - return '\\n' - self.assertEqual(repr(s1()), '\\n') - self.assertEqual(repr(s2()), '\\n') def test_printable_repr(self): self.assertEqual(repr('\U00010000'), "'%c'" % (0x10000,)) # printable @@ -2628,507 +2622,6 @@ def test_check_encoding_errors(self): self.assertEqual(proc.rc, 10, proc) -class CAPITest(unittest.TestCase): - - # Test PyUnicode_FromFormat() - def test_from_format(self): - import_helper.import_module('ctypes') - from ctypes import ( - c_char_p, - pythonapi, py_object, sizeof, - c_int, c_long, c_longlong, c_ssize_t, - c_uint, c_ulong, c_ulonglong, c_size_t, c_void_p) - name = "PyUnicode_FromFormat" - _PyUnicode_FromFormat = getattr(pythonapi, name) - _PyUnicode_FromFormat.argtypes = (c_char_p,) - _PyUnicode_FromFormat.restype = py_object - - def PyUnicode_FromFormat(format, *args): - cargs = tuple( - py_object(arg) if isinstance(arg, str) else arg - for arg in args) - return _PyUnicode_FromFormat(format, *cargs) - - def check_format(expected, format, *args): - text = PyUnicode_FromFormat(format, *args) - self.assertEqual(expected, text) - - # ascii format, non-ascii argument - check_format('ascii\x7f=unicode\xe9', - b'ascii\x7f=%U', 'unicode\xe9') - - # non-ascii format, ascii argument: ensure that PyUnicode_FromFormatV() - # raises an error - self.assertRaisesRegex(ValueError, - r'^PyUnicode_FromFormatV\(\) expects an ASCII-encoded format ' - 'string, got a non-ASCII byte: 0xe9$', - PyUnicode_FromFormat, b'unicode\xe9=%s', 'ascii') - - # test "%c" - check_format('\uabcd', - b'%c', c_int(0xabcd)) - check_format('\U0010ffff', - b'%c', c_int(0x10ffff)) - with self.assertRaises(OverflowError): - PyUnicode_FromFormat(b'%c', c_int(0x110000)) - # Issue #18183 - check_format('\U00010000\U00100000', - b'%c%c', c_int(0x10000), c_int(0x100000)) - - # test "%" - check_format('%', - b'%%') - check_format('%s', - b'%%s') - check_format('[%]', - b'[%%]') - check_format('%abc', - b'%%%s', b'abc') - - # truncated string - check_format('abc', - 
b'%.3s', b'abcdef') - check_format('abc[\ufffd', - b'%.5s', 'abc[\u20ac]'.encode('utf8')) - check_format("'\\u20acABC'", - b'%A', '\u20acABC') - check_format("'\\u20", - b'%.5A', '\u20acABCDEF') - check_format("'\u20acABC'", - b'%R', '\u20acABC') - check_format("'\u20acA", - b'%.3R', '\u20acABCDEF') - check_format('\u20acAB', - b'%.3S', '\u20acABCDEF') - check_format('\u20acAB', - b'%.3U', '\u20acABCDEF') - check_format('\u20acAB', - b'%.3V', '\u20acABCDEF', None) - check_format('abc[\ufffd', - b'%.5V', None, 'abc[\u20ac]'.encode('utf8')) - - # following tests comes from #7330 - # test width modifier and precision modifier with %S - check_format("repr= abc", - b'repr=%5S', 'abc') - check_format("repr=ab", - b'repr=%.2S', 'abc') - check_format("repr= ab", - b'repr=%5.2S', 'abc') - - # test width modifier and precision modifier with %R - check_format("repr= 'abc'", - b'repr=%8R', 'abc') - check_format("repr='ab", - b'repr=%.3R', 'abc') - check_format("repr= 'ab", - b'repr=%5.3R', 'abc') - - # test width modifier and precision modifier with %A - check_format("repr= 'abc'", - b'repr=%8A', 'abc') - check_format("repr='ab", - b'repr=%.3A', 'abc') - check_format("repr= 'ab", - b'repr=%5.3A', 'abc') - - # test width modifier and precision modifier with %s - check_format("repr= abc", - b'repr=%5s', b'abc') - check_format("repr=ab", - b'repr=%.2s', b'abc') - check_format("repr= ab", - b'repr=%5.2s', b'abc') - - # test width modifier and precision modifier with %U - check_format("repr= abc", - b'repr=%5U', 'abc') - check_format("repr=ab", - b'repr=%.2U', 'abc') - check_format("repr= ab", - b'repr=%5.2U', 'abc') - - # test width modifier and precision modifier with %V - check_format("repr= abc", - b'repr=%5V', 'abc', b'123') - check_format("repr=ab", - b'repr=%.2V', 'abc', b'123') - check_format("repr= ab", - b'repr=%5.2V', 'abc', b'123') - check_format("repr= 123", - b'repr=%5V', None, b'123') - check_format("repr=12", - b'repr=%.2V', None, b'123') - check_format("repr= 12", - b'repr=%5.2V', None, b'123') - - # test integer formats (%i, %d, %u) - check_format('010', - b'%03i', c_int(10)) - check_format('0010', - b'%0.4i', c_int(10)) - check_format('-123', - b'%i', c_int(-123)) - check_format('-123', - b'%li', c_long(-123)) - check_format('-123', - b'%lli', c_longlong(-123)) - check_format('-123', - b'%zi', c_ssize_t(-123)) - - check_format('-123', - b'%d', c_int(-123)) - check_format('-123', - b'%ld', c_long(-123)) - check_format('-123', - b'%lld', c_longlong(-123)) - check_format('-123', - b'%zd', c_ssize_t(-123)) - - check_format('123', - b'%u', c_uint(123)) - check_format('123', - b'%lu', c_ulong(123)) - check_format('123', - b'%llu', c_ulonglong(123)) - check_format('123', - b'%zu', c_size_t(123)) - - # test long output - min_longlong = -(2 ** (8 * sizeof(c_longlong) - 1)) - max_longlong = -min_longlong - 1 - check_format(str(min_longlong), - b'%lld', c_longlong(min_longlong)) - check_format(str(max_longlong), - b'%lld', c_longlong(max_longlong)) - max_ulonglong = 2 ** (8 * sizeof(c_ulonglong)) - 1 - check_format(str(max_ulonglong), - b'%llu', c_ulonglong(max_ulonglong)) - PyUnicode_FromFormat(b'%p', c_void_p(-1)) - - # test padding (width and/or precision) - check_format('123'.rjust(10, '0'), - b'%010i', c_int(123)) - check_format('123'.rjust(100), - b'%100i', c_int(123)) - check_format('123'.rjust(100, '0'), - b'%.100i', c_int(123)) - check_format('123'.rjust(80, '0').rjust(100), - b'%100.80i', c_int(123)) - - check_format('123'.rjust(10, '0'), - b'%010u', c_uint(123)) - 
check_format('123'.rjust(100), - b'%100u', c_uint(123)) - check_format('123'.rjust(100, '0'), - b'%.100u', c_uint(123)) - check_format('123'.rjust(80, '0').rjust(100), - b'%100.80u', c_uint(123)) - - check_format('123'.rjust(10, '0'), - b'%010x', c_int(0x123)) - check_format('123'.rjust(100), - b'%100x', c_int(0x123)) - check_format('123'.rjust(100, '0'), - b'%.100x', c_int(0x123)) - check_format('123'.rjust(80, '0').rjust(100), - b'%100.80x', c_int(0x123)) - - # test %A - check_format(r"%A:'abc\xe9\uabcd\U0010ffff'", - b'%%A:%A', 'abc\xe9\uabcd\U0010ffff') - - # test %V - check_format('repr=abc', - b'repr=%V', 'abc', b'xyz') - - # test %p - # We cannot test the exact result, - # because it returns a hex representation of a C pointer, - # which is going to be different each time. But, we can test the format. - p_format_regex = r'^0x[a-zA-Z0-9]{3,}$' - p_format1 = PyUnicode_FromFormat(b'%p', 'abc') - self.assertIsInstance(p_format1, str) - self.assertRegex(p_format1, p_format_regex) - - p_format2 = PyUnicode_FromFormat(b'%p %p', '123456', b'xyz') - self.assertIsInstance(p_format2, str) - self.assertRegex(p_format2, - r'0x[a-zA-Z0-9]{3,} 0x[a-zA-Z0-9]{3,}') - - # Extra args are ignored: - p_format3 = PyUnicode_FromFormat(b'%p', '123456', None, b'xyz') - self.assertIsInstance(p_format3, str) - self.assertRegex(p_format3, p_format_regex) - - # Test string decode from parameter of %s using utf-8. - # b'\xe4\xba\xba\xe6\xb0\x91' is utf-8 encoded byte sequence of - # '\u4eba\u6c11' - check_format('repr=\u4eba\u6c11', - b'repr=%V', None, b'\xe4\xba\xba\xe6\xb0\x91') - - #Test replace error handler. - check_format('repr=abc\ufffd', - b'repr=%V', None, b'abc\xff') - - # Issue #33817: empty strings - check_format('', - b'') - check_format('', - b'%s', b'') - - # check for crashes - for fmt in (b'%', b'%0', b'%01', b'%.', b'%.1', - b'%0%s', b'%1%s', b'%.%s', b'%.1%s', b'%1abc', - b'%l', b'%ll', b'%z', b'%ls', b'%lls', b'%zs'): - with self.subTest(fmt=fmt): - self.assertRaisesRegex(SystemError, 'invalid format string', - PyUnicode_FromFormat, fmt, b'abc') - self.assertRaisesRegex(SystemError, 'invalid format string', - PyUnicode_FromFormat, b'%+i', c_int(10)) - - # Test PyUnicode_AsWideChar() - @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') - def test_aswidechar(self): - from _testcapi import unicode_aswidechar - import_helper.import_module('ctypes') - from ctypes import c_wchar, sizeof - - wchar, size = unicode_aswidechar('abcdef', 2) - self.assertEqual(size, 2) - self.assertEqual(wchar, 'ab') - - wchar, size = unicode_aswidechar('abc', 3) - self.assertEqual(size, 3) - self.assertEqual(wchar, 'abc') - - wchar, size = unicode_aswidechar('abc', 4) - self.assertEqual(size, 3) - self.assertEqual(wchar, 'abc\0') - - wchar, size = unicode_aswidechar('abc', 10) - self.assertEqual(size, 3) - self.assertEqual(wchar, 'abc\0') - - wchar, size = unicode_aswidechar('abc\0def', 20) - self.assertEqual(size, 7) - self.assertEqual(wchar, 'abc\0def\0') - - nonbmp = chr(0x10ffff) - if sizeof(c_wchar) == 2: - buflen = 3 - nchar = 2 - else: # sizeof(c_wchar) == 4 - buflen = 2 - nchar = 1 - wchar, size = unicode_aswidechar(nonbmp, buflen) - self.assertEqual(size, nchar) - self.assertEqual(wchar, nonbmp + '\0') - - # Test PyUnicode_AsWideCharString() - @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') - def test_aswidecharstring(self): - from _testcapi import unicode_aswidecharstring - import_helper.import_module('ctypes') - from ctypes import c_wchar, 
sizeof - - wchar, size = unicode_aswidecharstring('abc') - self.assertEqual(size, 3) - self.assertEqual(wchar, 'abc\0') - - wchar, size = unicode_aswidecharstring('abc\0def') - self.assertEqual(size, 7) - self.assertEqual(wchar, 'abc\0def\0') - - nonbmp = chr(0x10ffff) - if sizeof(c_wchar) == 2: - nchar = 2 - else: # sizeof(c_wchar) == 4 - nchar = 1 - wchar, size = unicode_aswidecharstring(nonbmp) - self.assertEqual(size, nchar) - self.assertEqual(wchar, nonbmp + '\0') - - # Test PyUnicode_AsUCS4() - @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') - def test_asucs4(self): - from _testcapi import unicode_asucs4 - for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600', - 'a\ud800b\udfffc', '\ud834\udd1e']: - l = len(s) - self.assertEqual(unicode_asucs4(s, l, True), s+'\0') - self.assertEqual(unicode_asucs4(s, l, False), s+'\uffff') - self.assertEqual(unicode_asucs4(s, l+1, True), s+'\0\uffff') - self.assertEqual(unicode_asucs4(s, l+1, False), s+'\0\uffff') - self.assertRaises(SystemError, unicode_asucs4, s, l-1, True) - self.assertRaises(SystemError, unicode_asucs4, s, l-2, False) - s = '\0'.join([s, s]) - self.assertEqual(unicode_asucs4(s, len(s), True), s+'\0') - self.assertEqual(unicode_asucs4(s, len(s), False), s+'\uffff') - - # Test PyUnicode_AsUTF8() - @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') - def test_asutf8(self): - from _testcapi import unicode_asutf8 - - bmp = '\u0100' - bmp2 = '\uffff' - nonbmp = chr(0x10ffff) - - self.assertEqual(unicode_asutf8(bmp), b'\xc4\x80') - self.assertEqual(unicode_asutf8(bmp2), b'\xef\xbf\xbf') - self.assertEqual(unicode_asutf8(nonbmp), b'\xf4\x8f\xbf\xbf') - self.assertRaises(UnicodeEncodeError, unicode_asutf8, 'a\ud800b\udfffc') - - # Test PyUnicode_AsUTF8AndSize() - @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') - def test_asutf8andsize(self): - from _testcapi import unicode_asutf8andsize - - bmp = '\u0100' - bmp2 = '\uffff' - nonbmp = chr(0x10ffff) - - self.assertEqual(unicode_asutf8andsize(bmp), (b'\xc4\x80', 2)) - self.assertEqual(unicode_asutf8andsize(bmp2), (b'\xef\xbf\xbf', 3)) - self.assertEqual(unicode_asutf8andsize(nonbmp), (b'\xf4\x8f\xbf\xbf', 4)) - self.assertRaises(UnicodeEncodeError, unicode_asutf8andsize, 'a\ud800b\udfffc') - - # Test PyUnicode_Count() - @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') - def test_count(self): - from _testcapi import unicode_count - - st = 'abcabd' - self.assertEqual(unicode_count(st, 'a', 0, len(st)), 2) - self.assertEqual(unicode_count(st, 'ab', 0, len(st)), 2) - self.assertEqual(unicode_count(st, 'abc', 0, len(st)), 1) - self.assertEqual(unicode_count(st, 'а', 0, len(st)), 0) # cyrillic "a" - # start < end - self.assertEqual(unicode_count(st, 'a', 3, len(st)), 1) - self.assertEqual(unicode_count(st, 'a', 4, len(st)), 0) - self.assertEqual(unicode_count(st, 'a', 0, sys.maxsize), 2) - # start >= end - self.assertEqual(unicode_count(st, 'abc', 0, 0), 0) - self.assertEqual(unicode_count(st, 'a', 3, 2), 0) - self.assertEqual(unicode_count(st, 'a', sys.maxsize, 5), 0) - # negative - self.assertEqual(unicode_count(st, 'ab', -len(st), -1), 2) - self.assertEqual(unicode_count(st, 'a', -len(st), -3), 1) - # wrong args - self.assertRaises(TypeError, unicode_count, 'a', 'a') - self.assertRaises(TypeError, unicode_count, 'a', 'a', 1) - self.assertRaises(TypeError, unicode_count, 1, 'a', 0, 1) - self.assertRaises(TypeError, unicode_count, 'a', 1, 0, 1) - # 
empty string - self.assertEqual(unicode_count('abc', '', 0, 3), 4) - self.assertEqual(unicode_count('abc', '', 1, 3), 3) - self.assertEqual(unicode_count('', '', 0, 1), 1) - self.assertEqual(unicode_count('', 'a', 0, 1), 0) - # different unicode kinds - for uni in "\xa1", "\u8000\u8080", "\ud800\udc02", "\U0001f100\U0001f1f1": - for ch in uni: - self.assertEqual(unicode_count(uni, ch, 0, len(uni)), 1) - self.assertEqual(unicode_count(st, ch, 0, len(st)), 0) - - # subclasses should still work - class MyStr(str): - pass - - self.assertEqual(unicode_count(MyStr('aab'), 'a', 0, 3), 2) - - # Test PyUnicode_FindChar() - @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') - def test_findchar(self): - from _testcapi import unicode_findchar - - for str in "\xa1", "\u8000\u8080", "\ud800\udc02", "\U0001f100\U0001f1f1": - for i, ch in enumerate(str): - self.assertEqual(unicode_findchar(str, ord(ch), 0, len(str), 1), i) - self.assertEqual(unicode_findchar(str, ord(ch), 0, len(str), -1), i) - - str = "!>_<!" - self.assertEqual(unicode_findchar(str, 0x110000, 0, len(str), 1), -1) - self.assertEqual(unicode_findchar(str, 0x110000, 0, len(str), -1), -1) - # start < end - self.assertEqual(unicode_findchar(str, ord('!'), 1, len(str)+1, 1), 4) - self.assertEqual(unicode_findchar(str, ord('!'), 1, len(str)+1, -1), 4) - # start >= end - self.assertEqual(unicode_findchar(str, ord('!'), 0, 0, 1), -1) - self.assertEqual(unicode_findchar(str, ord('!'), len(str), 0, 1), -1) - # negative - self.assertEqual(unicode_findchar(str, ord('!'), -len(str), -1, 1), 0) - self.assertEqual(unicode_findchar(str, ord('!'), -len(str), -1, -1), 0) - - # Test PyUnicode_CopyCharacters() - @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') - def test_copycharacters(self): - from _testcapi import unicode_copycharacters - - strings = [ - 'abcde', '\xa1\xa2\xa3\xa4\xa5', - '\u4f60\u597d\u4e16\u754c\uff01', - '\U0001f600\U0001f601\U0001f602\U0001f603\U0001f604' - ] - - for idx, from_ in enumerate(strings): - # wide -> narrow: exceed maxchar limitation - for to in strings[:idx]: - self.assertRaises( - SystemError, - unicode_copycharacters, to, 0, from_, 0, 5 - ) - # same kind - for from_start in range(5): - self.assertEqual( - unicode_copycharacters(from_, 0, from_, from_start, 5), - (from_[from_start:from_start+5].ljust(5, '\0'), - 5-from_start) - ) - for to_start in range(5): - self.assertEqual( - unicode_copycharacters(from_, to_start, from_, to_start, 5), - (from_[to_start:to_start+5].rjust(5, '\0'), - 5-to_start) - ) - # narrow -> wide - # Tests omitted since this creates invalid strings. 
- - s = strings[0] - self.assertRaises(IndexError, unicode_copycharacters, s, 6, s, 0, 5) - self.assertRaises(IndexError, unicode_copycharacters, s, -1, s, 0, 5) - self.assertRaises(IndexError, unicode_copycharacters, s, 0, s, 6, 5) - self.assertRaises(IndexError, unicode_copycharacters, s, 0, s, -1, 5) - self.assertRaises(SystemError, unicode_copycharacters, s, 1, s, 0, 5) - self.assertRaises(SystemError, unicode_copycharacters, s, 0, s, 0, -1) - self.assertRaises(SystemError, unicode_copycharacters, s, 0, b'', 0, 0) - - @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') - def test_pep393_utf8_caching_bug(self): - # Issue #25709: Problem with string concatenation and utf-8 cache - from _testcapi import getargs_s_hash - for k in 0x24, 0xa4, 0x20ac, 0x1f40d: - s = '' - for i in range(5): - # Due to CPython specific optimization the 's' string can be - # resized in-place. - s += chr(k) - # Parsing with the "s#" format code calls indirectly - # PyUnicode_AsUTF8AndSize() which creates the UTF-8 - # encoded string cached in the Unicode object. - self.assertEqual(getargs_s_hash(s), chr(k).encode() * (i + 1)) - # Check that the second call returns the same result - self.assertEqual(getargs_s_hash(s), chr(k).encode() * (i + 1)) - class StringModuleTest(unittest.TestCase): def test_formatter_parser(self): def parse(format): diff --git a/Lib/test/test_unicodedata.py b/Lib/test/test_unicodedata.py index a85bda3144bc35..74503c89e559a0 100644 --- a/Lib/test/test_unicodedata.py +++ b/Lib/test/test_unicodedata.py @@ -12,7 +12,8 @@ import unicodedata import unittest from test.support import (open_urlresource, requires_resource, script_helper, - cpython_only, check_disallow_instantiation) + cpython_only, check_disallow_instantiation, + ResourceDenied) class UnicodeMethodsTest(unittest.TestCase): @@ -364,8 +365,8 @@ def test_normalization(self): except PermissionError: self.skipTest(f"Permission error when downloading {TESTDATAURL} " f"into the test data directory") - except (OSError, HTTPException): - self.fail(f"Could not retrieve {TESTDATAURL}") + except (OSError, HTTPException) as exc: + self.skipTest(f"Failed to download {TESTDATAURL}: {exc}") with testdata: self.run_normalization_tests(testdata) diff --git a/Lib/test/test_unittest/test_async_case.py b/Lib/test/test_unittest/test_async_case.py index fab8270ea33abd..a465103b59b6ec 100644 --- a/Lib/test/test_unittest/test_async_case.py +++ b/Lib/test/test_unittest/test_async_case.py @@ -49,69 +49,87 @@ def setUp(self): self.addCleanup(support.gc_collect) def test_full_cycle(self): + expected = ['setUp', + 'asyncSetUp', + 'test', + 'asyncTearDown', + 'tearDown', + 'cleanup6', + 'cleanup5', + 'cleanup4', + 'cleanup3', + 'cleanup2', + 'cleanup1'] class Test(unittest.IsolatedAsyncioTestCase): def setUp(self): self.assertEqual(events, []) events.append('setUp') VAR.set(VAR.get() + ('setUp',)) + self.addCleanup(self.on_cleanup1) + self.addAsyncCleanup(self.on_cleanup2) async def asyncSetUp(self): - self.assertEqual(events, ['setUp']) + self.assertEqual(events, expected[:1]) events.append('asyncSetUp') VAR.set(VAR.get() + ('asyncSetUp',)) - self.addAsyncCleanup(self.on_cleanup1) + self.addCleanup(self.on_cleanup3) + self.addAsyncCleanup(self.on_cleanup4) async def test_func(self): - self.assertEqual(events, ['setUp', - 'asyncSetUp']) + self.assertEqual(events, expected[:2]) events.append('test') VAR.set(VAR.get() + ('test',)) - self.addAsyncCleanup(self.on_cleanup2) + self.addCleanup(self.on_cleanup5) + 
self.addAsyncCleanup(self.on_cleanup6) async def asyncTearDown(self): - self.assertEqual(events, ['setUp', - 'asyncSetUp', - 'test']) + self.assertEqual(events, expected[:3]) VAR.set(VAR.get() + ('asyncTearDown',)) events.append('asyncTearDown') def tearDown(self): - self.assertEqual(events, ['setUp', - 'asyncSetUp', - 'test', - 'asyncTearDown']) + self.assertEqual(events, expected[:4]) events.append('tearDown') VAR.set(VAR.get() + ('tearDown',)) - async def on_cleanup1(self): - self.assertEqual(events, ['setUp', - 'asyncSetUp', - 'test', - 'asyncTearDown', - 'tearDown', - 'cleanup2']) + def on_cleanup1(self): + self.assertEqual(events, expected[:10]) events.append('cleanup1') VAR.set(VAR.get() + ('cleanup1',)) nonlocal cvar cvar = VAR.get() async def on_cleanup2(self): - self.assertEqual(events, ['setUp', - 'asyncSetUp', - 'test', - 'asyncTearDown', - 'tearDown']) + self.assertEqual(events, expected[:9]) events.append('cleanup2') VAR.set(VAR.get() + ('cleanup2',)) + def on_cleanup3(self): + self.assertEqual(events, expected[:8]) + events.append('cleanup3') + VAR.set(VAR.get() + ('cleanup3',)) + + async def on_cleanup4(self): + self.assertEqual(events, expected[:7]) + events.append('cleanup4') + VAR.set(VAR.get() + ('cleanup4',)) + + def on_cleanup5(self): + self.assertEqual(events, expected[:6]) + events.append('cleanup5') + VAR.set(VAR.get() + ('cleanup5',)) + + async def on_cleanup6(self): + self.assertEqual(events, expected[:5]) + events.append('cleanup6') + VAR.set(VAR.get() + ('cleanup6',)) + events = [] cvar = () test = Test("test_func") result = test.run() self.assertEqual(result.errors, []) self.assertEqual(result.failures, []) - expected = ['setUp', 'asyncSetUp', 'test', - 'asyncTearDown', 'tearDown', 'cleanup2', 'cleanup1'] self.assertEqual(events, expected) self.assertEqual(cvar, tuple(expected)) diff --git a/Lib/test/test_unittest/test_result.py b/Lib/test/test_unittest/test_result.py index e71d114751d94d..efd9c902350506 100644 --- a/Lib/test/test_unittest/test_result.py +++ b/Lib/test/test_unittest/test_result.py @@ -275,6 +275,62 @@ def get_exc_info(): self.assertEqual(len(dropped), 1) self.assertIn("raise self.failureException(msg)", dropped[0]) + def test_addFailure_filter_traceback_frames_chained_exception_self_loop(self): + class Foo(unittest.TestCase): + def test_1(self): + pass + + def get_exc_info(): + try: + loop = Exception("Loop") + loop.__cause__ = loop + loop.__context__ = loop + raise loop + except: + return sys.exc_info() + + exc_info_tuple = get_exc_info() + + test = Foo('test_1') + result = unittest.TestResult() + result.startTest(test) + result.addFailure(test, exc_info_tuple) + result.stopTest(test) + + formatted_exc = result.failures[0][1] + self.assertEqual(formatted_exc.count("Exception: Loop\n"), 1) + + def test_addFailure_filter_traceback_frames_chained_exception_cycle(self): + class Foo(unittest.TestCase): + def test_1(self): + pass + + def get_exc_info(): + try: + # Create two directionally opposed cycles + # __cause__ in one direction, __context__ in the other + A, B, C = Exception("A"), Exception("B"), Exception("C") + edges = [(C, B), (B, A), (A, C)] + for ex1, ex2 in edges: + ex1.__cause__ = ex2 + ex2.__context__ = ex1 + raise C + except: + return sys.exc_info() + + exc_info_tuple = get_exc_info() + + test = Foo('test_1') + result = unittest.TestResult() + result.startTest(test) + result.addFailure(test, exc_info_tuple) + result.stopTest(test) + + formatted_exc = result.failures[0][1] + self.assertEqual(formatted_exc.count("Exception: A\n"), 1) + 
self.assertEqual(formatted_exc.count("Exception: B\n"), 1) + self.assertEqual(formatted_exc.count("Exception: C\n"), 1) + # "addError(test, err)" # ... # "Called when the test case test raises an unexpected exception err diff --git a/Lib/test/test_unittest/test_runner.py b/Lib/test/test_unittest/test_runner.py index d396f2bab57871..df584b7620d092 100644 --- a/Lib/test/test_unittest/test_runner.py +++ b/Lib/test/test_unittest/test_runner.py @@ -134,11 +134,13 @@ def testCleanupInRun(self): class TestableTest(unittest.TestCase): def setUp(self): ordering.append('setUp') + test.addCleanup(cleanup2) if blowUp: raise Exception('foo') def testNothing(self): ordering.append('test') + test.addCleanup(cleanup3) def tearDown(self): ordering.append('tearDown') @@ -149,8 +151,9 @@ def cleanup1(): ordering.append('cleanup1') def cleanup2(): ordering.append('cleanup2') + def cleanup3(): + ordering.append('cleanup3') test.addCleanup(cleanup1) - test.addCleanup(cleanup2) def success(some_test): self.assertEqual(some_test, test) @@ -160,7 +163,7 @@ def success(some_test): result.addSuccess = success test.run(result) - self.assertEqual(ordering, ['setUp', 'test', 'tearDown', + self.assertEqual(ordering, ['setUp', 'test', 'tearDown', 'cleanup3', 'cleanup2', 'cleanup1', 'success']) blowUp = True @@ -168,7 +171,7 @@ def success(some_test): test = TestableTest('testNothing') test.addCleanup(cleanup1) test.run(result) - self.assertEqual(ordering, ['setUp', 'cleanup1']) + self.assertEqual(ordering, ['setUp', 'cleanup2', 'cleanup1']) def testTestCaseDebugExecutesCleanups(self): ordering = [] @@ -180,9 +183,11 @@ def setUp(self): def testNothing(self): ordering.append('test') + self.addCleanup(cleanup3) def tearDown(self): ordering.append('tearDown') + test.addCleanup(cleanup4) test = TestableTest('testNothing') @@ -191,9 +196,14 @@ def cleanup1(): test.addCleanup(cleanup2) def cleanup2(): ordering.append('cleanup2') + def cleanup3(): + ordering.append('cleanup3') + def cleanup4(): + ordering.append('cleanup4') test.debug() - self.assertEqual(ordering, ['setUp', 'test', 'tearDown', 'cleanup1', 'cleanup2']) + self.assertEqual(ordering, ['setUp', 'test', 'tearDown', 'cleanup4', + 'cleanup3', 'cleanup1', 'cleanup2']) def test_enterContext(self): @@ -352,13 +362,14 @@ def testNothing(self): ordering.append('test') @classmethod def tearDownClass(cls): + ordering.append('tearDownClass') raise Exception('TearDownClassExc') suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestableTest) with self.assertRaises(Exception) as cm: suite.debug() self.assertEqual(str(cm.exception), 'TearDownClassExc') - self.assertEqual(ordering, ['setUpClass', 'test']) + self.assertEqual(ordering, ['setUpClass', 'test', 'tearDownClass']) self.assertTrue(TestableTest._class_cleanups) TestableTest._class_cleanups.clear() @@ -368,7 +379,7 @@ def tearDownClass(cls): with self.assertRaises(Exception) as cm: suite.debug() self.assertEqual(str(cm.exception), 'TearDownClassExc') - self.assertEqual(ordering, ['setUpClass', 'test']) + self.assertEqual(ordering, ['setUpClass', 'test', 'tearDownClass']) self.assertTrue(TestableTest._class_cleanups) TestableTest._class_cleanups.clear() @@ -536,6 +547,33 @@ def testNothing(self): self.assertEqual(TestableTest._class_cleanups, []) + def test_run_nested_test(self): + ordering = [] + + class InnerTest(unittest.TestCase): + @classmethod + def setUpClass(cls): + ordering.append('inner setup') + cls.addClassCleanup(ordering.append, 'inner cleanup') + def test(self): + ordering.append('inner test') + + class 
OuterTest(unittest.TestCase): + @classmethod + def setUpClass(cls): + ordering.append('outer setup') + cls.addClassCleanup(ordering.append, 'outer cleanup') + def test(self): + ordering.append('start outer test') + runTests(InnerTest) + ordering.append('end outer test') + + runTests(OuterTest) + self.assertEqual(ordering, [ + 'outer setup', 'start outer test', + 'inner setup', 'inner test', 'inner cleanup', + 'end outer test', 'outer cleanup']) + class TestModuleCleanUp(unittest.TestCase): def test_add_and_do_ModuleCleanup(self): @@ -747,6 +785,7 @@ def setUpModule(): unittest.addModuleCleanup(cleanup, ordering) @staticmethod def tearDownModule(): + ordering.append('tearDownModule') raise Exception('CleanUpExc') class TestableTest(unittest.TestCase): @@ -765,7 +804,8 @@ def tearDownClass(cls): self.assertEqual(result.errors[0][1].splitlines()[-1], 'Exception: CleanUpExc') self.assertEqual(ordering, ['setUpModule', 'setUpClass', 'test', - 'tearDownClass', 'cleanup_good']) + 'tearDownClass', 'tearDownModule', + 'cleanup_good']) self.assertEqual(unittest.case._module_cleanups, []) def test_debug_module_executes_cleanUp(self): @@ -819,6 +859,7 @@ def setUpModule(): unittest.addModuleCleanup(cleanup, ordering, blowUp=blowUp) @staticmethod def tearDownModule(): + ordering.append('tearDownModule') raise Exception('TearDownModuleExc') class TestableTest(unittest.TestCase): @@ -838,7 +879,7 @@ def tearDownClass(cls): suite.debug() self.assertEqual(str(cm.exception), 'TearDownModuleExc') self.assertEqual(ordering, ['setUpModule', 'setUpClass', 'test', - 'tearDownClass']) + 'tearDownClass', 'tearDownModule']) self.assertTrue(unittest.case._module_cleanups) unittest.case._module_cleanups.clear() @@ -849,7 +890,7 @@ def tearDownClass(cls): suite.debug() self.assertEqual(str(cm.exception), 'TearDownModuleExc') self.assertEqual(ordering, ['setUpModule', 'setUpClass', 'test', - 'tearDownClass']) + 'tearDownClass', 'tearDownModule']) self.assertTrue(unittest.case._module_cleanups) unittest.case._module_cleanups.clear() diff --git a/Lib/test/test_unittest/testmock/testasync.py b/Lib/test/test_unittest/testmock/testasync.py index 1bab671acdef18..e05a22861d47bf 100644 --- a/Lib/test/test_unittest/testmock/testasync.py +++ b/Lib/test/test_unittest/testmock/testasync.py @@ -149,6 +149,23 @@ async def test_async(): run(test_async()) + def test_patch_dict_async_def(self): + foo = {'a': 'a'} + @patch.dict(foo, {'a': 'b'}) + async def test_async(): + self.assertEqual(foo['a'], 'b') + + self.assertTrue(iscoroutinefunction(test_async)) + run(test_async()) + + def test_patch_dict_async_def_context(self): + foo = {'a': 'a'} + async def test_async(): + with patch.dict(foo, {'a': 'b'}): + self.assertEqual(foo['a'], 'b') + + run(test_async()) + class AsyncMockTest(unittest.TestCase): def test_iscoroutinefunction_default(self): diff --git a/Lib/test/test_unittest/testmock/testsealable.py b/Lib/test/test_unittest/testmock/testsealable.py index daba2b49b46f63..e0c38293cffd7a 100644 --- a/Lib/test/test_unittest/testmock/testsealable.py +++ b/Lib/test/test_unittest/testmock/testsealable.py @@ -200,6 +200,9 @@ def ban(self): self.assertIsInstance(foo.Baz.baz, mock.NonCallableMagicMock) self.assertIsInstance(foo.Baz.ban, mock.MagicMock) + # see gh-91803 + self.assertIsInstance(foo.bar2(), mock.MagicMock) + self.assertEqual(foo.bar1(), 'a') foo.bar1.return_value = 'new_a' self.assertEqual(foo.bar1(), 'new_a') @@ -212,7 +215,7 @@ def ban(self): with self.assertRaises(AttributeError): foo.bar = 1 with 
self.assertRaises(AttributeError): - foo.bar2() + foo.bar2().x foo.bar2.return_value = 'bar2' self.assertEqual(foo.bar2(), 'bar2') diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py index f067560ca6caa1..2df74f5e6f99b2 100644 --- a/Lib/test/test_urllib.py +++ b/Lib/test/test_urllib.py @@ -1104,6 +1104,8 @@ def test_unquoting(self): self.assertEqual(result.count('%'), 1, "using unquote(): not all characters escaped: " "%s" % result) + + def test_unquote_rejects_none_and_tuple(self): self.assertRaises((TypeError, AttributeError), urllib.parse.unquote, None) self.assertRaises((TypeError, AttributeError), urllib.parse.unquote, ()) diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py index 28f88412fdcaac..498c0382d2137b 100644 --- a/Lib/test/test_urllib2.py +++ b/Lib/test/test_urllib2.py @@ -1824,6 +1824,10 @@ def test_HTTPError_interface(self): expected_errmsg = '<HTTPError %s: %r>' % (err.code, err.msg) self.assertEqual(repr(err), expected_errmsg) + def test_gh_98778(self): + x = urllib.error.HTTPError("url", 405, "METHOD NOT ALLOWED", None, None) + self.assertEqual(getattr(x, "__notes__", ()), ()) + def test_parse_proxy(self): parse_proxy_test_cases = [ ('proxy.example.com', diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py index 59a601d9e85b08..80fb9e5cd2a445 100644 --- a/Lib/test/test_urlparse.py +++ b/Lib/test/test_urlparse.py @@ -668,6 +668,24 @@ def test_attributes_bad_port(self): with self.assertRaises(ValueError): p.port + def test_attributes_bad_scheme(self): + """Check handling of invalid schemes.""" + for bytes in (False, True): + for parse in (urllib.parse.urlsplit, urllib.parse.urlparse): + for scheme in (".", "+", "-", "0", "http&", "६http"): + with self.subTest(bytes=bytes, parse=parse, scheme=scheme): + url = scheme + "://www.example.net" + if bytes: + if url.isascii(): + url = url.encode("ascii") + else: + continue + p = parse(url) + if bytes: + self.assertEqual(p.scheme, b"") + else: + self.assertEqual(p.scheme, "") + def test_attributes_without_netloc(self): # This example is straight from RFC 3261. It looks like it # should allow the username, hostname, and port to be filled diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py index 5ce86cffd24886..4e18dfc23c40c2 100644 --- a/Lib/test/test_venv.py +++ b/Lib/test/test_venv.py @@ -537,6 +537,80 @@ def test_pathsep_error(self): self.assertRaises(ValueError, venv.create, bad_itempath) self.assertRaises(ValueError, venv.create, pathlib.Path(bad_itempath)) + @unittest.skipIf(os.name == 'nt', 'not relevant on Windows') + @requireVenvCreate + def test_zippath_from_non_installed_posix(self): + """ + Test that when create venv from non-installed python, the zip path + value is as expected. + """ + rmtree(self.env_dir) + # First try to create a non-installed python. It's not a real full + # functional non-installed python, but enough for this test. 
+ platlibdir = sys.platlibdir + non_installed_dir = os.path.realpath(tempfile.mkdtemp()) + self.addCleanup(rmtree, non_installed_dir) + bindir = os.path.join(non_installed_dir, self.bindir) + os.mkdir(bindir) + shutil.copy2(sys.executable, bindir) + libdir = os.path.join(non_installed_dir, platlibdir, self.lib[1]) + os.makedirs(libdir) + landmark = os.path.join(libdir, "os.py") + stdlib_zip = "python%d%d.zip" % sys.version_info[:2] + zip_landmark = os.path.join(non_installed_dir, + platlibdir, + stdlib_zip) + additional_pythonpath_for_non_installed = [] + # Copy stdlib files to the non-installed python so venv can + # correctly calculate the prefix. + for eachpath in sys.path: + if eachpath.endswith(".zip"): + if os.path.isfile(eachpath): + shutil.copyfile( + eachpath, + os.path.join(non_installed_dir, platlibdir)) + elif os.path.isfile(os.path.join(eachpath, "os.py")): + for name in os.listdir(eachpath): + if name == "site-packages": + continue + fn = os.path.join(eachpath, name) + if os.path.isfile(fn): + shutil.copy(fn, libdir) + elif os.path.isdir(fn): + shutil.copytree(fn, os.path.join(libdir, name)) + else: + additional_pythonpath_for_non_installed.append( + eachpath) + cmd = [os.path.join(non_installed_dir, self.bindir, self.exe), + "-m", + "venv", + "--without-pip", + self.env_dir] + # Our fake non-installed python is not fully functional because + # it cannot find the extensions. Set PYTHONPATH so it can run the + # venv module correctly. + pythonpath = os.pathsep.join( + additional_pythonpath_for_non_installed) + # For python built with shared enabled. We need to set + # LD_LIBRARY_PATH so the non-installed python can find and link + # libpython.so + ld_library_path = sysconfig.get_config_var("LIBDIR") + if not ld_library_path or sysconfig.is_python_build(): + ld_library_path = os.path.abspath(os.path.dirname(sys.executable)) + if sys.platform == 'darwin': + ld_library_path_env = "DYLD_LIBRARY_PATH" + else: + ld_library_path_env = "LD_LIBRARY_PATH" + subprocess.check_call(cmd, + env={"PYTHONPATH": pythonpath, + ld_library_path_env: ld_library_path}) + envpy = os.path.join(self.env_dir, self.bindir, self.exe) + # Now check the venv created from the non-installed python has + # correct zip path in pythonpath. + cmd = [envpy, '-S', '-c', 'import sys; print(sys.path)'] + out, err = check_output(cmd) + self.assertTrue(zip_landmark.encode() in out) + @requireVenvCreate class EnsurePipTest(BaseTest): """Test venv module installation of pip.""" diff --git a/Lib/test/test_weakref.py b/Lib/test/test_weakref.py index 3a9573d6e70559..7c5920797d2538 100644 --- a/Lib/test/test_weakref.py +++ b/Lib/test/test_weakref.py @@ -597,7 +597,7 @@ class C(object): # deallocation of c2. del c2 - def test_callback_in_cycle_1(self): + def test_callback_in_cycle(self): import gc class J(object): @@ -637,40 +637,11 @@ def acallback(self, ignore): del I, J, II gc.collect() - def test_callback_in_cycle_2(self): + def test_callback_reachable_one_way(self): import gc - # This is just like test_callback_in_cycle_1, except that II is an - # old-style class. The symptom is different then: an instance of an - # old-style class looks in its own __dict__ first. 'J' happens to - # get cleared from I.__dict__ before 'wr', and 'J' was never in II's - # __dict__, so the attribute isn't found. The difference is that - # the old-style II doesn't have a NULL __mro__ (it doesn't have any - # __mro__), so no segfault occurs. Instead it got: - # test_callback_in_cycle_2 (__main__.ReferencesTestCase) ... 
- # Exception exceptions.AttributeError: - # "II instance has no attribute 'J'" in <bound method II.acallback - # of <?.II instance at 0x00B9B4B8>> ignored - - class J(object): - pass - - class II: - def acallback(self, ignore): - self.J - - I = II() - I.J = J - I.wr = weakref.ref(J, I.acallback) - - del I, J, II - gc.collect() - - def test_callback_in_cycle_3(self): - import gc - - # This one broke the first patch that fixed the last two. In this - # case, the objects reachable from the callback aren't also reachable + # This one broke the first patch that fixed the previous test. In this case, + # the objects reachable from the callback aren't also reachable # from the object (c1) *triggering* the callback: you can get to # c1 from c2, but not vice-versa. The result was that c2's __dict__ # got tp_clear'ed by the time the c2.cb callback got invoked. @@ -690,10 +661,10 @@ def cb(self, ignore): del c1, c2 gc.collect() - def test_callback_in_cycle_4(self): + def test_callback_different_classes(self): import gc - # Like test_callback_in_cycle_3, except c2 and c1 have different + # Like test_callback_reachable_one_way, except c2 and c1 have different # classes. c2's class (C) isn't reachable from c1 then, so protecting # objects reachable from the dying object (c1) isn't enough to stop # c2's class (C) from getting tp_clear'ed before c2.cb is invoked. diff --git a/Lib/test/test_winreg.py b/Lib/test/test_winreg.py index 8157c2da6efaa6..769ab67b0f5611 100644 --- a/Lib/test/test_winreg.py +++ b/Lib/test/test_winreg.py @@ -113,7 +113,6 @@ def _write_test_data(self, root_key, subkeystr="sub_key", "does not close the actual key!") except OSError: pass - def _read_test_data(self, root_key, subkeystr="sub_key", OpenKey=OpenKey): # Check we can get default value for this key. val = QueryValue(root_key, test_key_name) @@ -340,6 +339,23 @@ def test_setvalueex_value_range(self): finally: DeleteKey(HKEY_CURRENT_USER, test_key_name) + def test_setvalueex_negative_one_check(self): + # Test for Issue #43984, check -1 was not set by SetValueEx. + # Py2Reg, which gets called by SetValueEx, wasn't checking return + # value by PyLong_AsUnsignedLong, thus setting -1 as value in the registry. + # The implementation now checks PyLong_AsUnsignedLong return value to assure + # the value set was not -1. + try: + with CreateKey(HKEY_CURRENT_USER, test_key_name) as ck: + with self.assertRaises(OverflowError): + SetValueEx(ck, "test_name_dword", None, REG_DWORD, -1) + SetValueEx(ck, "test_name_qword", None, REG_QWORD, -1) + self.assertRaises(FileNotFoundError, QueryValueEx, ck, "test_name_dword") + self.assertRaises(FileNotFoundError, QueryValueEx, ck, "test_name_qword") + + finally: + DeleteKey(HKEY_CURRENT_USER, test_key_name) + def test_queryvalueex_return_value(self): # Test for Issue #16759, return unsigned int from QueryValueEx. # Reg2Py, which gets called by QueryValueEx, was returning a value diff --git a/Lib/test/test_zipfile/__init__.py b/Lib/test/test_zipfile/__init__.py new file mode 100644 index 00000000000000..4b16ecc31156a5 --- /dev/null +++ b/Lib/test/test_zipfile/__init__.py @@ -0,0 +1,5 @@ +import os +from test.support import load_package_tests + +def load_tests(*args): + return load_package_tests(os.path.dirname(__file__), *args) diff --git a/Lib/test/test_zipfile/__main__.py b/Lib/test/test_zipfile/__main__.py new file mode 100644 index 00000000000000..e25ac946edffe4 --- /dev/null +++ b/Lib/test/test_zipfile/__main__.py @@ -0,0 +1,7 @@ +import unittest + +from . 
import load_tests # noqa: F401 + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_zipfile/_functools.py b/Lib/test/test_zipfile/_functools.py new file mode 100644 index 00000000000000..75f2b20e06d77f --- /dev/null +++ b/Lib/test/test_zipfile/_functools.py @@ -0,0 +1,9 @@ +import functools + + +# from jaraco.functools 3.5.2 +def compose(*funcs): + def compose_two(f1, f2): + return lambda *args, **kwargs: f1(f2(*args, **kwargs)) + + return functools.reduce(compose_two, funcs) diff --git a/Lib/test/test_zipfile/_itertools.py b/Lib/test/test_zipfile/_itertools.py new file mode 100644 index 00000000000000..559f3f111b88a3 --- /dev/null +++ b/Lib/test/test_zipfile/_itertools.py @@ -0,0 +1,12 @@ +# from more_itertools v8.13.0 +def always_iterable(obj, base_type=(str, bytes)): + if obj is None: + return iter(()) + + if (base_type is not None) and isinstance(obj, base_type): + return iter((obj,)) + + try: + return iter(obj) + except TypeError: + return iter((obj,)) diff --git a/Lib/test/test_zipfile/_test_params.py b/Lib/test/test_zipfile/_test_params.py new file mode 100644 index 00000000000000..bc95b4ebf4a168 --- /dev/null +++ b/Lib/test/test_zipfile/_test_params.py @@ -0,0 +1,39 @@ +import types +import functools + +from ._itertools import always_iterable + + +def parameterize(names, value_groups): + """ + Decorate a test method to run it as a set of subtests. + + Modeled after pytest.parametrize. + """ + + def decorator(func): + @functools.wraps(func) + def wrapped(self): + for values in value_groups: + resolved = map(Invoked.eval, always_iterable(values)) + params = dict(zip(always_iterable(names), resolved)) + with self.subTest(**params): + func(self, **params) + + return wrapped + + return decorator + + +class Invoked(types.SimpleNamespace): + """ + Wrap a function to be invoked for each usage. + """ + + @classmethod + def wrap(cls, func): + return cls(func=func) + + @classmethod + def eval(cls, cand): + return cand.func() if isinstance(cand, cls) else cand diff --git a/Lib/test/test_zipfile.py b/Lib/test/test_zipfile/test_core.py similarity index 90% rename from Lib/test/test_zipfile.py rename to Lib/test/test_zipfile/test_core.py index 6f6f4bc26b0d40..bb0f1467735bcf 100644 --- a/Lib/test/test_zipfile.py +++ b/Lib/test/test_zipfile/test_core.py @@ -6,7 +6,6 @@ import os import pathlib import posixpath -import string import struct import subprocess import sys @@ -14,7 +13,6 @@ import unittest import unittest.mock as mock import zipfile -import functools from tempfile import TemporaryFile @@ -2715,13 +2713,13 @@ def tearDown(self): class ZipInfoTests(unittest.TestCase): def test_from_file(self): zi = zipfile.ZipInfo.from_file(__file__) - self.assertEqual(posixpath.basename(zi.filename), 'test_zipfile.py') + self.assertEqual(posixpath.basename(zi.filename), 'test_core.py') self.assertFalse(zi.is_dir()) self.assertEqual(zi.file_size, os.path.getsize(__file__)) def test_from_file_pathlike(self): zi = zipfile.ZipInfo.from_file(pathlib.Path(__file__)) - self.assertEqual(posixpath.basename(zi.filename), 'test_zipfile.py') + self.assertEqual(posixpath.basename(zi.filename), 'test_core.py') self.assertFalse(zi.is_dir()) self.assertEqual(zi.file_size, os.path.getsize(__file__)) @@ -2867,420 +2865,6 @@ def test_execute_zip64(self): self.assertIn(b'number in executable: 5', output) -# Poor man's technique to consume a (smallish) iterable. 
-consume = tuple - - -# from jaraco.itertools 5.0 -class jaraco: - class itertools: - class Counter: - def __init__(self, i): - self.count = 0 - self._orig_iter = iter(i) - - def __iter__(self): - return self - - def __next__(self): - result = next(self._orig_iter) - self.count += 1 - return result - - -def add_dirs(zf): - """ - Given a writable zip file zf, inject directory entries for - any directories implied by the presence of children. - """ - for name in zipfile.CompleteDirs._implied_dirs(zf.namelist()): - zf.writestr(name, b"") - return zf - - -def build_alpharep_fixture(): - """ - Create a zip file with this structure: - - . - ├── a.txt - ├── b - │ ├── c.txt - │ ├── d - │ │ └── e.txt - │ └── f.txt - └── g - └── h - └── i.txt - - This fixture has the following key characteristics: - - - a file at the root (a) - - a file two levels deep (b/d/e) - - multiple files in a directory (b/c, b/f) - - a directory containing only a directory (g/h) - - "alpha" because it uses alphabet - "rep" because it's a representative example - """ - data = io.BytesIO() - zf = zipfile.ZipFile(data, "w") - zf.writestr("a.txt", b"content of a") - zf.writestr("b/c.txt", b"content of c") - zf.writestr("b/d/e.txt", b"content of e") - zf.writestr("b/f.txt", b"content of f") - zf.writestr("g/h/i.txt", b"content of i") - zf.filename = "alpharep.zip" - return zf - - -def pass_alpharep(meth): - """ - Given a method, wrap it in a for loop that invokes method - with each subtest. - """ - - @functools.wraps(meth) - def wrapper(self): - for alpharep in self.zipfile_alpharep(): - meth(self, alpharep=alpharep) - - return wrapper - - -class TestPath(unittest.TestCase): - def setUp(self): - self.fixtures = contextlib.ExitStack() - self.addCleanup(self.fixtures.close) - - def zipfile_alpharep(self): - with self.subTest(): - yield build_alpharep_fixture() - with self.subTest(): - yield add_dirs(build_alpharep_fixture()) - - def zipfile_ondisk(self, alpharep): - tmpdir = pathlib.Path(self.fixtures.enter_context(temp_dir())) - buffer = alpharep.fp - alpharep.close() - path = tmpdir / alpharep.filename - with path.open("wb") as strm: - strm.write(buffer.getvalue()) - return path - - @pass_alpharep - def test_iterdir_and_types(self, alpharep): - root = zipfile.Path(alpharep) - assert root.is_dir() - a, b, g = root.iterdir() - assert a.is_file() - assert b.is_dir() - assert g.is_dir() - c, f, d = b.iterdir() - assert c.is_file() and f.is_file() - (e,) = d.iterdir() - assert e.is_file() - (h,) = g.iterdir() - (i,) = h.iterdir() - assert i.is_file() - - @pass_alpharep - def test_is_file_missing(self, alpharep): - root = zipfile.Path(alpharep) - assert not root.joinpath('missing.txt').is_file() - - @pass_alpharep - def test_iterdir_on_file(self, alpharep): - root = zipfile.Path(alpharep) - a, b, g = root.iterdir() - with self.assertRaises(ValueError): - a.iterdir() - - @pass_alpharep - def test_subdir_is_dir(self, alpharep): - root = zipfile.Path(alpharep) - assert (root / 'b').is_dir() - assert (root / 'b/').is_dir() - assert (root / 'g').is_dir() - assert (root / 'g/').is_dir() - - @pass_alpharep - def test_open(self, alpharep): - root = zipfile.Path(alpharep) - a, b, g = root.iterdir() - with a.open(encoding="utf-8") as strm: - data = strm.read() - assert data == "content of a" - - def test_open_write(self): - """ - If the zipfile is open for write, it should be possible to - write bytes or text to it. 
- """ - zf = zipfile.Path(zipfile.ZipFile(io.BytesIO(), mode='w')) - with zf.joinpath('file.bin').open('wb') as strm: - strm.write(b'binary contents') - with zf.joinpath('file.txt').open('w', encoding="utf-8") as strm: - strm.write('text file') - - def test_open_extant_directory(self): - """ - Attempting to open a directory raises IsADirectoryError. - """ - zf = zipfile.Path(add_dirs(build_alpharep_fixture())) - with self.assertRaises(IsADirectoryError): - zf.joinpath('b').open() - - @pass_alpharep - def test_open_binary_invalid_args(self, alpharep): - root = zipfile.Path(alpharep) - with self.assertRaises(ValueError): - root.joinpath('a.txt').open('rb', encoding='utf-8') - with self.assertRaises(ValueError): - root.joinpath('a.txt').open('rb', 'utf-8') - - def test_open_missing_directory(self): - """ - Attempting to open a missing directory raises FileNotFoundError. - """ - zf = zipfile.Path(add_dirs(build_alpharep_fixture())) - with self.assertRaises(FileNotFoundError): - zf.joinpath('z').open() - - @pass_alpharep - def test_read(self, alpharep): - root = zipfile.Path(alpharep) - a, b, g = root.iterdir() - assert a.read_text(encoding="utf-8") == "content of a" - assert a.read_bytes() == b"content of a" - - @pass_alpharep - def test_joinpath(self, alpharep): - root = zipfile.Path(alpharep) - a = root.joinpath("a.txt") - assert a.is_file() - e = root.joinpath("b").joinpath("d").joinpath("e.txt") - assert e.read_text(encoding="utf-8") == "content of e" - - @pass_alpharep - def test_joinpath_multiple(self, alpharep): - root = zipfile.Path(alpharep) - e = root.joinpath("b", "d", "e.txt") - assert e.read_text(encoding="utf-8") == "content of e" - - @pass_alpharep - def test_traverse_truediv(self, alpharep): - root = zipfile.Path(alpharep) - a = root / "a.txt" - assert a.is_file() - e = root / "b" / "d" / "e.txt" - assert e.read_text(encoding="utf-8") == "content of e" - - @pass_alpharep - def test_traverse_simplediv(self, alpharep): - """ - Disable the __future__.division when testing traversal. - """ - code = compile( - source="zipfile.Path(alpharep) / 'a'", - filename="(test)", - mode="eval", - dont_inherit=True, - ) - eval(code) - - @pass_alpharep - def test_pathlike_construction(self, alpharep): - """ - zipfile.Path should be constructable from a path-like object - """ - zipfile_ondisk = self.zipfile_ondisk(alpharep) - pathlike = pathlib.Path(str(zipfile_ondisk)) - zipfile.Path(pathlike) - - @pass_alpharep - def test_traverse_pathlike(self, alpharep): - root = zipfile.Path(alpharep) - root / pathlib.Path("a") - - @pass_alpharep - def test_parent(self, alpharep): - root = zipfile.Path(alpharep) - assert (root / 'a').parent.at == '' - assert (root / 'a' / 'b').parent.at == 'a/' - - @pass_alpharep - def test_dir_parent(self, alpharep): - root = zipfile.Path(alpharep) - assert (root / 'b').parent.at == '' - assert (root / 'b/').parent.at == '' - - @pass_alpharep - def test_missing_dir_parent(self, alpharep): - root = zipfile.Path(alpharep) - assert (root / 'missing dir/').parent.at == '' - - @pass_alpharep - def test_mutability(self, alpharep): - """ - If the underlying zipfile is changed, the Path object should - reflect that change. 
- """ - root = zipfile.Path(alpharep) - a, b, g = root.iterdir() - alpharep.writestr('foo.txt', 'foo') - alpharep.writestr('bar/baz.txt', 'baz') - assert any(child.name == 'foo.txt' for child in root.iterdir()) - assert (root / 'foo.txt').read_text(encoding="utf-8") == 'foo' - (baz,) = (root / 'bar').iterdir() - assert baz.read_text(encoding="utf-8") == 'baz' - - HUGE_ZIPFILE_NUM_ENTRIES = 2 ** 13 - - def huge_zipfile(self): - """Create a read-only zipfile with a huge number of entries entries.""" - strm = io.BytesIO() - zf = zipfile.ZipFile(strm, "w") - for entry in map(str, range(self.HUGE_ZIPFILE_NUM_ENTRIES)): - zf.writestr(entry, entry) - zf.mode = 'r' - return zf - - def test_joinpath_constant_time(self): - """ - Ensure joinpath on items in zipfile is linear time. - """ - root = zipfile.Path(self.huge_zipfile()) - entries = jaraco.itertools.Counter(root.iterdir()) - for entry in entries: - entry.joinpath('suffix') - # Check the file iterated all items - assert entries.count == self.HUGE_ZIPFILE_NUM_ENTRIES - - # @func_timeout.func_set_timeout(3) - def test_implied_dirs_performance(self): - data = ['/'.join(string.ascii_lowercase + str(n)) for n in range(10000)] - zipfile.CompleteDirs._implied_dirs(data) - - @pass_alpharep - def test_read_does_not_close(self, alpharep): - alpharep = self.zipfile_ondisk(alpharep) - with zipfile.ZipFile(alpharep) as file: - for rep in range(2): - zipfile.Path(file, 'a.txt').read_text(encoding="utf-8") - - @pass_alpharep - def test_subclass(self, alpharep): - class Subclass(zipfile.Path): - pass - - root = Subclass(alpharep) - assert isinstance(root / 'b', Subclass) - - @pass_alpharep - def test_filename(self, alpharep): - root = zipfile.Path(alpharep) - assert root.filename == pathlib.Path('alpharep.zip') - - @pass_alpharep - def test_root_name(self, alpharep): - """ - The name of the root should be the name of the zipfile - """ - root = zipfile.Path(alpharep) - assert root.name == 'alpharep.zip' == root.filename.name - - @pass_alpharep - def test_suffix(self, alpharep): - """ - The suffix of the root should be the suffix of the zipfile. - The suffix of each nested file is the final component's last suffix, if any. - Includes the leading period, just like pathlib.Path. - """ - root = zipfile.Path(alpharep) - assert root.suffix == '.zip' == root.filename.suffix - - b = root / "b.txt" - assert b.suffix == ".txt" - - c = root / "c" / "filename.tar.gz" - assert c.suffix == ".gz" - - d = root / "d" - assert d.suffix == "" - - @pass_alpharep - def test_suffixes(self, alpharep): - """ - The suffix of the root should be the suffix of the zipfile. - The suffix of each nested file is the final component's last suffix, if any. - Includes the leading period, just like pathlib.Path. 
- """ - root = zipfile.Path(alpharep) - assert root.suffixes == ['.zip'] == root.filename.suffixes - - b = root / 'b.txt' - assert b.suffixes == ['.txt'] - - c = root / 'c' / 'filename.tar.gz' - assert c.suffixes == ['.tar', '.gz'] - - d = root / 'd' - assert d.suffixes == [] - - e = root / '.hgrc' - assert e.suffixes == [] - - @pass_alpharep - def test_stem(self, alpharep): - """ - The final path component, without its suffix - """ - root = zipfile.Path(alpharep) - assert root.stem == 'alpharep' == root.filename.stem - - b = root / "b.txt" - assert b.stem == "b" - - c = root / "c" / "filename.tar.gz" - assert c.stem == "filename.tar" - - d = root / "d" - assert d.stem == "d" - - @pass_alpharep - def test_root_parent(self, alpharep): - root = zipfile.Path(alpharep) - assert root.parent == pathlib.Path('.') - root.root.filename = 'foo/bar.zip' - assert root.parent == pathlib.Path('foo') - - @pass_alpharep - def test_root_unnamed(self, alpharep): - """ - It is an error to attempt to get the name - or parent of an unnamed zipfile. - """ - alpharep.filename = None - root = zipfile.Path(alpharep) - with self.assertRaises(TypeError): - root.name - with self.assertRaises(TypeError): - root.parent - - # .name and .parent should still work on subs - sub = root / "b" - assert sub.name == "b" - assert sub.parent - - @pass_alpharep - def test_inheritance(self, alpharep): - cls = type('PathChild', (zipfile.Path,), {}) - for alpharep in self.zipfile_alpharep(): - file = cls(alpharep).joinpath('some dir').parent - assert isinstance(file, cls) - - class EncodedMetadataTests(unittest.TestCase): file_names = ['\u4e00', '\u4e8c', '\u4e09'] # Han 'one', 'two', 'three' file_content = [ diff --git a/Lib/test/test_zipfile/test_path.py b/Lib/test/test_zipfile/test_path.py new file mode 100644 index 00000000000000..02253c59e959fb --- /dev/null +++ b/Lib/test/test_zipfile/test_path.py @@ -0,0 +1,431 @@ +import io +import zipfile +import contextlib +import pathlib +import unittest +import string +import pickle +import itertools + +from ._test_params import parameterize, Invoked +from ._functools import compose + + +from test.support.os_helper import temp_dir + + +# Poor man's technique to consume a (smallish) iterable. +consume = tuple + + +# from jaraco.itertools 5.0 +class jaraco: + class itertools: + class Counter: + def __init__(self, i): + self.count = 0 + self._orig_iter = iter(i) + + def __iter__(self): + return self + + def __next__(self): + result = next(self._orig_iter) + self.count += 1 + return result + + +def add_dirs(zf): + """ + Given a writable zip file zf, inject directory entries for + any directories implied by the presence of children. + """ + for name in zipfile.CompleteDirs._implied_dirs(zf.namelist()): + zf.writestr(name, b"") + return zf + + +def build_alpharep_fixture(): + """ + Create a zip file with this structure: + + . 
+ ├── a.txt + ├── b + │ ├── c.txt + │ ├── d + │ │ └── e.txt + │ └── f.txt + └── g + └── h + └── i.txt + + This fixture has the following key characteristics: + + - a file at the root (a) + - a file two levels deep (b/d/e) + - multiple files in a directory (b/c, b/f) + - a directory containing only a directory (g/h) + + "alpha" because it uses alphabet + "rep" because it's a representative example + """ + data = io.BytesIO() + zf = zipfile.ZipFile(data, "w") + zf.writestr("a.txt", b"content of a") + zf.writestr("b/c.txt", b"content of c") + zf.writestr("b/d/e.txt", b"content of e") + zf.writestr("b/f.txt", b"content of f") + zf.writestr("g/h/i.txt", b"content of i") + zf.filename = "alpharep.zip" + return zf + + +alpharep_generators = [ + Invoked.wrap(build_alpharep_fixture), + Invoked.wrap(compose(add_dirs, build_alpharep_fixture)), +] + +pass_alpharep = parameterize(['alpharep'], alpharep_generators) + + +class TestPath(unittest.TestCase): + def setUp(self): + self.fixtures = contextlib.ExitStack() + self.addCleanup(self.fixtures.close) + + def zipfile_ondisk(self, alpharep): + tmpdir = pathlib.Path(self.fixtures.enter_context(temp_dir())) + buffer = alpharep.fp + alpharep.close() + path = tmpdir / alpharep.filename + with path.open("wb") as strm: + strm.write(buffer.getvalue()) + return path + + @pass_alpharep + def test_iterdir_and_types(self, alpharep): + root = zipfile.Path(alpharep) + assert root.is_dir() + a, b, g = root.iterdir() + assert a.is_file() + assert b.is_dir() + assert g.is_dir() + c, f, d = b.iterdir() + assert c.is_file() and f.is_file() + (e,) = d.iterdir() + assert e.is_file() + (h,) = g.iterdir() + (i,) = h.iterdir() + assert i.is_file() + + @pass_alpharep + def test_is_file_missing(self, alpharep): + root = zipfile.Path(alpharep) + assert not root.joinpath('missing.txt').is_file() + + @pass_alpharep + def test_iterdir_on_file(self, alpharep): + root = zipfile.Path(alpharep) + a, b, g = root.iterdir() + with self.assertRaises(ValueError): + a.iterdir() + + @pass_alpharep + def test_subdir_is_dir(self, alpharep): + root = zipfile.Path(alpharep) + assert (root / 'b').is_dir() + assert (root / 'b/').is_dir() + assert (root / 'g').is_dir() + assert (root / 'g/').is_dir() + + @pass_alpharep + def test_open(self, alpharep): + root = zipfile.Path(alpharep) + a, b, g = root.iterdir() + with a.open(encoding="utf-8") as strm: + data = strm.read() + assert data == "content of a" + + def test_open_write(self): + """ + If the zipfile is open for write, it should be possible to + write bytes or text to it. + """ + zf = zipfile.Path(zipfile.ZipFile(io.BytesIO(), mode='w')) + with zf.joinpath('file.bin').open('wb') as strm: + strm.write(b'binary contents') + with zf.joinpath('file.txt').open('w', encoding="utf-8") as strm: + strm.write('text file') + + def test_open_extant_directory(self): + """ + Attempting to open a directory raises IsADirectoryError. + """ + zf = zipfile.Path(add_dirs(build_alpharep_fixture())) + with self.assertRaises(IsADirectoryError): + zf.joinpath('b').open() + + @pass_alpharep + def test_open_binary_invalid_args(self, alpharep): + root = zipfile.Path(alpharep) + with self.assertRaises(ValueError): + root.joinpath('a.txt').open('rb', encoding='utf-8') + with self.assertRaises(ValueError): + root.joinpath('a.txt').open('rb', 'utf-8') + + def test_open_missing_directory(self): + """ + Attempting to open a missing directory raises FileNotFoundError. 
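As a doctest-style aside (a sketch that mirrors the test body below; `add_dirs` and `build_alpharep_fixture` are the helpers defined earlier in this file):

>>> root = zipfile.Path(add_dirs(build_alpharep_fixture()))
>>> root.joinpath('z').open()
Traceback (most recent call last):
    ...
FileNotFoundError: ...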
+ """ + zf = zipfile.Path(add_dirs(build_alpharep_fixture())) + with self.assertRaises(FileNotFoundError): + zf.joinpath('z').open() + + @pass_alpharep + def test_read(self, alpharep): + root = zipfile.Path(alpharep) + a, b, g = root.iterdir() + assert a.read_text(encoding="utf-8") == "content of a" + assert a.read_bytes() == b"content of a" + + @pass_alpharep + def test_joinpath(self, alpharep): + root = zipfile.Path(alpharep) + a = root.joinpath("a.txt") + assert a.is_file() + e = root.joinpath("b").joinpath("d").joinpath("e.txt") + assert e.read_text(encoding="utf-8") == "content of e" + + @pass_alpharep + def test_joinpath_multiple(self, alpharep): + root = zipfile.Path(alpharep) + e = root.joinpath("b", "d", "e.txt") + assert e.read_text(encoding="utf-8") == "content of e" + + @pass_alpharep + def test_traverse_truediv(self, alpharep): + root = zipfile.Path(alpharep) + a = root / "a.txt" + assert a.is_file() + e = root / "b" / "d" / "e.txt" + assert e.read_text(encoding="utf-8") == "content of e" + + @pass_alpharep + def test_traverse_simplediv(self, alpharep): + """ + Disable the __future__.division when testing traversal. + """ + code = compile( + source="zipfile.Path(alpharep) / 'a'", + filename="(test)", + mode="eval", + dont_inherit=True, + ) + eval(code) + + @pass_alpharep + def test_pathlike_construction(self, alpharep): + """ + zipfile.Path should be constructable from a path-like object + """ + zipfile_ondisk = self.zipfile_ondisk(alpharep) + pathlike = pathlib.Path(str(zipfile_ondisk)) + zipfile.Path(pathlike) + + @pass_alpharep + def test_traverse_pathlike(self, alpharep): + root = zipfile.Path(alpharep) + root / pathlib.Path("a") + + @pass_alpharep + def test_parent(self, alpharep): + root = zipfile.Path(alpharep) + assert (root / 'a').parent.at == '' + assert (root / 'a' / 'b').parent.at == 'a/' + + @pass_alpharep + def test_dir_parent(self, alpharep): + root = zipfile.Path(alpharep) + assert (root / 'b').parent.at == '' + assert (root / 'b/').parent.at == '' + + @pass_alpharep + def test_missing_dir_parent(self, alpharep): + root = zipfile.Path(alpharep) + assert (root / 'missing dir/').parent.at == '' + + @pass_alpharep + def test_mutability(self, alpharep): + """ + If the underlying zipfile is changed, the Path object should + reflect that change. + """ + root = zipfile.Path(alpharep) + a, b, g = root.iterdir() + alpharep.writestr('foo.txt', 'foo') + alpharep.writestr('bar/baz.txt', 'baz') + assert any(child.name == 'foo.txt' for child in root.iterdir()) + assert (root / 'foo.txt').read_text(encoding="utf-8") == 'foo' + (baz,) = (root / 'bar').iterdir() + assert baz.read_text(encoding="utf-8") == 'baz' + + HUGE_ZIPFILE_NUM_ENTRIES = 2**13 + + def huge_zipfile(self): + """Create a read-only zipfile with a huge number of entries entries.""" + strm = io.BytesIO() + zf = zipfile.ZipFile(strm, "w") + for entry in map(str, range(self.HUGE_ZIPFILE_NUM_ENTRIES)): + zf.writestr(entry, entry) + zf.mode = 'r' + return zf + + def test_joinpath_constant_time(self): + """ + Ensure joinpath on items in zipfile is linear time. 
+ """ + root = zipfile.Path(self.huge_zipfile()) + entries = jaraco.itertools.Counter(root.iterdir()) + for entry in entries: + entry.joinpath('suffix') + # Check the file iterated all items + assert entries.count == self.HUGE_ZIPFILE_NUM_ENTRIES + + # @func_timeout.func_set_timeout(3) + def test_implied_dirs_performance(self): + data = ['/'.join(string.ascii_lowercase + str(n)) for n in range(10000)] + zipfile.CompleteDirs._implied_dirs(data) + + @pass_alpharep + def test_read_does_not_close(self, alpharep): + alpharep = self.zipfile_ondisk(alpharep) + with zipfile.ZipFile(alpharep) as file: + for rep in range(2): + zipfile.Path(file, 'a.txt').read_text(encoding="utf-8") + + @pass_alpharep + def test_subclass(self, alpharep): + class Subclass(zipfile.Path): + pass + + root = Subclass(alpharep) + assert isinstance(root / 'b', Subclass) + + @pass_alpharep + def test_filename(self, alpharep): + root = zipfile.Path(alpharep) + assert root.filename == pathlib.Path('alpharep.zip') + + @pass_alpharep + def test_root_name(self, alpharep): + """ + The name of the root should be the name of the zipfile + """ + root = zipfile.Path(alpharep) + assert root.name == 'alpharep.zip' == root.filename.name + + @pass_alpharep + def test_suffix(self, alpharep): + """ + The suffix of the root should be the suffix of the zipfile. + The suffix of each nested file is the final component's last suffix, if any. + Includes the leading period, just like pathlib.Path. + """ + root = zipfile.Path(alpharep) + assert root.suffix == '.zip' == root.filename.suffix + + b = root / "b.txt" + assert b.suffix == ".txt" + + c = root / "c" / "filename.tar.gz" + assert c.suffix == ".gz" + + d = root / "d" + assert d.suffix == "" + + @pass_alpharep + def test_suffixes(self, alpharep): + """ + The suffix of the root should be the suffix of the zipfile. + The suffix of each nested file is the final component's last suffix, if any. + Includes the leading period, just like pathlib.Path. + """ + root = zipfile.Path(alpharep) + assert root.suffixes == ['.zip'] == root.filename.suffixes + + b = root / 'b.txt' + assert b.suffixes == ['.txt'] + + c = root / 'c' / 'filename.tar.gz' + assert c.suffixes == ['.tar', '.gz'] + + d = root / 'd' + assert d.suffixes == [] + + e = root / '.hgrc' + assert e.suffixes == [] + + @pass_alpharep + def test_stem(self, alpharep): + """ + The final path component, without its suffix + """ + root = zipfile.Path(alpharep) + assert root.stem == 'alpharep' == root.filename.stem + + b = root / "b.txt" + assert b.stem == "b" + + c = root / "c" / "filename.tar.gz" + assert c.stem == "filename.tar" + + d = root / "d" + assert d.stem == "d" + + @pass_alpharep + def test_root_parent(self, alpharep): + root = zipfile.Path(alpharep) + assert root.parent == pathlib.Path('.') + root.root.filename = 'foo/bar.zip' + assert root.parent == pathlib.Path('foo') + + @pass_alpharep + def test_root_unnamed(self, alpharep): + """ + It is an error to attempt to get the name + or parent of an unnamed zipfile. 
+ """ + alpharep.filename = None + root = zipfile.Path(alpharep) + with self.assertRaises(TypeError): + root.name + with self.assertRaises(TypeError): + root.parent + + # .name and .parent should still work on subs + sub = root / "b" + assert sub.name == "b" + assert sub.parent + + @pass_alpharep + def test_inheritance(self, alpharep): + cls = type('PathChild', (zipfile.Path,), {}) + file = cls(alpharep).joinpath('some dir').parent + assert isinstance(file, cls) + + @parameterize( + ['alpharep', 'path_type', 'subpath'], + itertools.product( + alpharep_generators, + [str, pathlib.Path], + ['', 'b/'], + ), + ) + def test_pickle(self, alpharep, path_type, subpath): + zipfile_ondisk = path_type(self.zipfile_ondisk(alpharep)) + + saved_1 = pickle.dumps(zipfile.Path(zipfile_ondisk, at=subpath)) + restored_1 = pickle.loads(saved_1) + first, *rest = restored_1.iterdir() + assert first.read_text().startswith('content of ') diff --git a/Lib/trace.py b/Lib/trace.py index 2cf3643878d4b8..213e46517d683d 100755 --- a/Lib/trace.py +++ b/Lib/trace.py @@ -172,7 +172,7 @@ def __init__(self, counts=None, calledfuncs=None, infile=None, try: with open(self.infile, 'rb') as f: counts, calledfuncs, callers = pickle.load(f) - self.update(self.__class__(counts, calledfuncs, callers)) + self.update(self.__class__(counts, calledfuncs, callers=callers)) except (OSError, EOFError, ValueError) as err: print(("Skipping counts file %r: %s" % (self.infile, err)), file=sys.stderr) diff --git a/Lib/traceback.py b/Lib/traceback.py index cf5f355ff04c3b..c43c4720ae5a15 100644 --- a/Lib/traceback.py +++ b/Lib/traceback.py @@ -586,12 +586,15 @@ def _extract_caret_anchors_from_line_segment(segment): if len(tree.body) != 1: return None + normalize = lambda offset: _byte_offset_to_character_offset(segment, offset) statement = tree.body[0] match statement: case ast.Expr(expr): match expr: case ast.BinOp(): - operator_str = segment[expr.left.end_col_offset:expr.right.col_offset] + operator_start = normalize(expr.left.end_col_offset) + operator_end = normalize(expr.right.col_offset) + operator_str = segment[operator_start:operator_end] operator_offset = len(operator_str) - len(operator_str.lstrip()) left_anchor = expr.left.end_col_offset + operator_offset @@ -601,9 +604,11 @@ def _extract_caret_anchors_from_line_segment(segment): and not operator_str[operator_offset + 1].isspace() ): right_anchor += 1 - return _Anchors(left_anchor, right_anchor) + return _Anchors(normalize(left_anchor), normalize(right_anchor)) case ast.Subscript(): - return _Anchors(expr.value.end_col_offset, expr.slice.end_col_offset + 1) + subscript_start = normalize(expr.value.end_col_offset) + subscript_end = normalize(expr.slice.end_col_offset + 1) + return _Anchors(subscript_start, subscript_end) return None @@ -1037,6 +1042,16 @@ def _compute_suggestion_error(exc_value, tb, wrong_name): + list(frame.f_globals) + list(frame.f_builtins) ) + + # Check first if we are in a method and the instance + # has the wrong name as attribute + if 'self' in frame.f_locals: + self = frame.f_locals['self'] + if hasattr(self, wrong_name): + return f"self.{wrong_name}" + + # Compute closest match + if len(d) > _MAX_CANDIDATE_ITEMS: return None wrong_name_len = len(wrong_name) diff --git a/Lib/typing.py b/Lib/typing.py index 233941598f76a3..d9d6fbcdb8f068 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -284,25 +284,6 @@ def _unpack_args(args): newargs.append(arg) return newargs -def _prepare_paramspec_params(cls, params): - """Prepares the parameters for a Generic containing 
ParamSpec - variables (internal helper). - """ - # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612. - if (len(cls.__parameters__) == 1 - and params and not _is_param_expr(params[0])): - assert isinstance(cls.__parameters__[0], ParamSpec) - return (params,) - else: - _check_generic(cls, params, len(cls.__parameters__)) - _params = [] - # Convert lists to tuples to help other libraries cache the results. - for p, tvar in zip(params, cls.__parameters__): - if isinstance(tvar, ParamSpec) and isinstance(p, list): - p = tuple(p) - _params.append(p) - return tuple(_params) - def _deduplicate(params): # Weed out strict duplicates, preserving the first of each occurrence. all_params = set(params) @@ -344,6 +325,7 @@ def _flatten_literal_params(parameters): _cleanups = [] +_caches = {} def _tp_cache(func=None, /, *, typed=False): @@ -351,13 +333,20 @@ def _tp_cache(func=None, /, *, typed=False): original function for non-hashable arguments. """ def decorator(func): - cached = functools.lru_cache(typed=typed)(func) - _cleanups.append(cached.cache_clear) + # The callback 'inner' references the newly created lru_cache + # indirectly by performing a lookup in the global '_caches' dictionary. + # This breaks a reference that can be problematic when combined with + # C API extensions that leak references to types. See GH-98253. + + cache = functools.lru_cache(typed=typed)(func) + _caches[func] = cache + _cleanups.append(cache.cache_clear) + del cache @functools.wraps(func) def inner(*args, **kwds): try: - return cached(*args, **kwds) + return _caches[func](*args, **kwds) except TypeError: pass # All real errors (not unhashable args) are raised below. return func(*args, **kwds) @@ -1238,7 +1227,18 @@ def __typing_subst__(self, arg): return arg def __typing_prepare_subst__(self, alias, args): - return _prepare_paramspec_params(alias, args) + params = alias.__parameters__ + i = params.index(self) + if i >= len(args): + raise TypeError(f"Too few arguments for {alias}") + # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612. + if len(params) == 1 and not _is_param_expr(args[0]): + assert i == 0 + args = (args,) + # Convert lists to tuples to help other libraries cache the results. + elif isinstance(args[i], list): + args = (*args[:i], tuple(args[i]), *args[i+1:]) + return args def _is_dunder(attr): return attr.startswith('__') and attr.endswith('__') @@ -1801,23 +1801,13 @@ def __class_getitem__(cls, params): if not isinstance(params, tuple): params = (params,) - if not params: - # We're only ok with `params` being empty if the class's only type - # parameter is a `TypeVarTuple` (which can contain zero types). - class_params = getattr(cls, "__parameters__", None) - only_class_parameter_is_typevartuple = ( - class_params is not None - and len(class_params) == 1 - and isinstance(class_params[0], TypeVarTuple) - ) - if not only_class_parameter_is_typevartuple: - raise TypeError( - f"Parameter list to {cls.__qualname__}[...] cannot be empty" - ) - params = tuple(_type_convert(p) for p in params) if cls in (Generic, Protocol): # Generic and Protocol can only be subscripted with unique type variables. + if not params: + raise TypeError( + f"Parameter list to {cls.__qualname__}[...] cannot be empty" + ) if not all(_is_typevar_like(p) for p in params): raise TypeError( f"Parameters to {cls.__name__}[...] must all be type variables " @@ -1827,13 +1817,20 @@ def __class_getitem__(cls, params): f"Parameters to {cls.__name__}[...] 
must all be unique") else: # Subscripting a regular Generic subclass. - if any(isinstance(t, ParamSpec) for t in cls.__parameters__): - params = _prepare_paramspec_params(cls, params) - elif not any(isinstance(p, TypeVarTuple) for p in cls.__parameters__): - # We only run this if there are no TypeVarTuples, because we - # don't check variadic generic arity at runtime (to reduce - # complexity of typing.py). - _check_generic(cls, params, len(cls.__parameters__)) + for param in cls.__parameters__: + prepare = getattr(param, '__typing_prepare_subst__', None) + if prepare is not None: + params = prepare(cls, params) + _check_generic(cls, params, len(cls.__parameters__)) + + new_args = [] + for param, new_arg in zip(cls.__parameters__, params): + if isinstance(param, TypeVarTuple): + new_args.extend(new_arg) + else: + new_args.append(new_arg) + params = tuple(new_args) + return _GenericAlias(cls, params, _paramspec_tvars=True) @@ -3366,6 +3363,7 @@ def dataclass_transform( eq_default: bool = True, order_default: bool = False, kw_only_default: bool = False, + frozen_default: bool = False, field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = (), **kwargs: Any, ) -> Callable[[T], T]: @@ -3419,6 +3417,8 @@ class CustomerModel(ModelBase): assumed to be True or False if it is omitted by the caller. - ``kw_only_default`` indicates whether the ``kw_only`` parameter is assumed to be True or False if it is omitted by the caller. + - ``frozen_default`` indicates whether the ``frozen`` parameter is + assumed to be True or False if it is omitted by the caller. - ``field_specifiers`` specifies a static list of supported classes or functions that describe fields, similar to ``dataclasses.field()``. - Arbitrary other keyword arguments are accepted in order to allow for @@ -3435,6 +3435,7 @@ def decorator(cls_or_fn): "eq_default": eq_default, "order_default": order_default, "kw_only_default": kw_only_default, + "frozen_default": frozen_default, "field_specifiers": field_specifiers, "kwargs": kwargs, } diff --git a/Lib/unittest/case.py b/Lib/unittest/case.py index b01f6605e23e39..5167c5f843f085 100644 --- a/Lib/unittest/case.py +++ b/Lib/unittest/case.py @@ -384,11 +384,11 @@ class TestCase(object): # of difflib. See #11763. 
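Returning to the typing change above: the ParamSpec special case now lives in `__typing_prepare_subst__`, preserving the PEP 612 equivalence called out in the comment. A minimal runnable sketch:

from typing import Generic, ParamSpec

P = ParamSpec('P')

class Z(Generic[P]):
    pass

# PEP 612 special case: a single ParamSpec accepts a bare argument list.
assert Z[[int, str, bool]] == Z[int, str, bool]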
_diffThreshold = 2**16 - # Attribute used by TestSuite for classSetUp - - _classSetupFailed = False - - _class_cleanups = [] + def __init_subclass__(cls, *args, **kwargs): + # Attribute used by TestSuite for classSetUp + cls._classSetupFailed = False + cls._class_cleanups = [] + super().__init_subclass__(*args, **kwargs) def __init__(self, methodName='runTest'): """Create an instance of the class that will use the named test diff --git a/Lib/unittest/mock.py b/Lib/unittest/mock.py index b8f4e57f0b4927..a273753d6a0abb 100644 --- a/Lib/unittest/mock.py +++ b/Lib/unittest/mock.py @@ -1809,6 +1809,12 @@ def __init__(self, in_dict, values=(), clear=False, **kwargs): def __call__(self, f): if isinstance(f, type): return self.decorate_class(f) + if inspect.iscoroutinefunction(f): + return self.decorate_async_callable(f) + return self.decorate_callable(f) + + + def decorate_callable(self, f): @wraps(f) def _inner(*args, **kw): self._patch_dict() @@ -1820,6 +1826,18 @@ def _inner(*args, **kw): return _inner + def decorate_async_callable(self, f): + @wraps(f) + async def _inner(*args, **kw): + self._patch_dict() + try: + return await f(*args, **kw) + finally: + self._unpatch_dict() + + return _inner + + def decorate_class(self, klass): for attr in dir(klass): attr_value = getattr(klass, attr) @@ -2745,6 +2763,7 @@ def create_autospec(spec, spec_set=False, instance=False, _parent=None, _new_parent=parent, **kwargs) mock._mock_children[entry] = new + new.return_value = child_klass() _check_signature(original, new, skipfirst=skipfirst) # so functions created with _set_signature become instance attributes, diff --git a/Lib/unittest/result.py b/Lib/unittest/result.py index 3da7005e603f4a..5ca4c23238b419 100644 --- a/Lib/unittest/result.py +++ b/Lib/unittest/result.py @@ -196,6 +196,7 @@ def _clean_tracebacks(self, exctype, value, tb, test): ret = None first = True excs = [(exctype, value, tb)] + seen = {id(value)} # Detect loops in chained exceptions. while excs: (exctype, value, tb) = excs.pop() # Skip test runner traceback levels @@ -214,8 +215,9 @@ def _clean_tracebacks(self, exctype, value, tb, test): if value is not None: for c in (value.__cause__, value.__context__): - if c is not None: + if c is not None and id(c) not in seen: excs.append((type(c), c, c.__traceback__)) + seen.add(id(c)) return ret def _is_relevant_tb_level(self, tb): diff --git a/Lib/urllib/error.py b/Lib/urllib/error.py index 8cd901f13f8e49..feec0e7f848e46 100644 --- a/Lib/urllib/error.py +++ b/Lib/urllib/error.py @@ -10,7 +10,7 @@ an application may want to handle an exception like a regular response. """ - +import io import urllib.response __all__ = ['URLError', 'HTTPError', 'ContentTooShortError'] @@ -42,12 +42,9 @@ def __init__(self, url, code, msg, hdrs, fp): self.hdrs = hdrs self.fp = fp self.filename = url - # The addinfourl classes depend on fp being a valid file - # object. In some cases, the HTTPError may not have a valid - # file object. If this happens, the simplest workaround is to - # not initialize the base classes. 
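For the unittest.result change above, a hedged sketch of the kind of self-referential exception chain the new `seen` set protects against (illustrative only):

try:
    try:
        raise ValueError("inner")
    except ValueError as inner:
        raise KeyError("outer") from inner
except KeyError as outer:
    # Manufacture a cycle: the inner exception's context now points back out.
    outer.__cause__.__context__ = outer
    looped = outer

# Walking __cause__/__context__ from `looped` without tracking visited ids
# would loop forever; _clean_tracebacks now records id(value) in `seen`
# before following the chain.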
- if fp is not None: - self.__super_init(fp, hdrs, url, code) + if fp is None: + fp = io.StringIO() + self.__super_init(fp, hdrs, url, code) def __str__(self): return 'HTTP Error %s: %s' % (self.code, self.msg) diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py index 9a3102afd63b9c..5f95c5ff7f9c1c 100644 --- a/Lib/urllib/parse.py +++ b/Lib/urllib/parse.py @@ -460,7 +460,7 @@ def urlsplit(url, scheme='', allow_fragments=True): allow_fragments = bool(allow_fragments) netloc = query = fragment = '' i = url.find(':') - if i > 0: + if i > 0 and url[0].isascii() and url[0].isalpha(): for c in url[:i]: if c not in scheme_chars: break @@ -600,6 +600,9 @@ def urldefrag(url): def unquote_to_bytes(string): """unquote_to_bytes('abc%20def') -> b'abc def'.""" + return bytes(_unquote_impl(string)) + +def _unquote_impl(string: bytes | bytearray | str) -> bytes | bytearray: # Note: strings are encoded as UTF-8. This is only an issue if it contains # unescaped non-ASCII characters, which URIs should not. if not string: @@ -611,8 +614,8 @@ def unquote_to_bytes(string): bits = string.split(b'%') if len(bits) == 1: return string - res = [bits[0]] - append = res.append + res = bytearray(bits[0]) + append = res.extend # Delay the initialization of the table to not waste memory # if the function is never called global _hextobyte @@ -626,10 +629,20 @@ def unquote_to_bytes(string): except KeyError: append(b'%') append(item) - return b''.join(res) + return res _asciire = re.compile('([\x00-\x7f]+)') +def _generate_unquoted_parts(string, encoding, errors): + previous_match_end = 0 + for ascii_match in _asciire.finditer(string): + start, end = ascii_match.span() + yield string[previous_match_end:start] # Non-ASCII + # The ascii_match[1] group == string[start:end]. + yield _unquote_impl(ascii_match[1]).decode(encoding, errors) + previous_match_end = end + yield string[previous_match_end:] # Non-ASCII tail + def unquote(string, encoding='utf-8', errors='replace'): """Replace %xx escapes by their single-character equivalent. The optional encoding and errors parameters specify how to decode percent-encoded @@ -641,21 +654,16 @@ def unquote(string, encoding='utf-8', errors='replace'): unquote('abc%20def') -> 'abc def'. """ if isinstance(string, bytes): - return unquote_to_bytes(string).decode(encoding, errors) + return _unquote_impl(string).decode(encoding, errors) if '%' not in string: + # Is it a string-like object? 
string.split return string if encoding is None: encoding = 'utf-8' if errors is None: errors = 'replace' - bits = _asciire.split(string) - res = [bits[0]] - append = res.append - for i in range(1, len(bits), 2): - append(unquote_to_bytes(bits[i]).decode(encoding, errors)) - append(bits[i + 1]) - return ''.join(res) + return ''.join(_generate_unquoted_parts(string, encoding, errors)) def parse_qs(qs, keep_blank_values=False, strict_parsing=False, diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py index 278aa3a14bfeea..151034e6a81bf9 100644 --- a/Lib/urllib/request.py +++ b/Lib/urllib/request.py @@ -265,10 +265,7 @@ def urlretrieve(url, filename=None, reporthook=None, data=None): if reporthook: reporthook(blocknum, bs, size) - while True: - block = fp.read(bs) - if not block: - break + while block := fp.read(bs): read += len(block) tfp.write(block) blocknum += 1 @@ -1847,10 +1844,7 @@ def retrieve(self, url, filename=None, reporthook=None, data=None): size = int(headers["Content-Length"]) if reporthook: reporthook(blocknum, bs, size) - while 1: - block = fp.read(bs) - if not block: - break + while block := fp.read(bs): read += len(block) tfp.write(block) blocknum += 1 diff --git a/Lib/uuid.py b/Lib/uuid.py index 8fe2479f3f22cf..e863b631877f6d 100644 --- a/Lib/uuid.py +++ b/Lib/uuid.py @@ -371,7 +371,12 @@ def _get_command_stdout(command, *args): # for are actually localized, but in theory some system could do so.) env = dict(os.environ) env['LC_ALL'] = 'C' - proc = subprocess.Popen((executable,) + args, + # Empty strings will be quoted by popen so we should just ommit it + if args != ('',): + command = (executable, *args) + else: + command = (executable,) + proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, env=env) @@ -511,7 +516,7 @@ def _ifconfig_getnode(): mac = _find_mac_near_keyword('ifconfig', args, keywords, lambda i: i+1) if mac: return mac - return None + return None def _ip_getnode(): """Get the hardware address on Unix by running ip.""" diff --git a/Lib/venv/__init__.py b/Lib/venv/__init__.py index 7bfc2d1b6fe057..978c98336f2b37 100644 --- a/Lib/venv/__init__.py +++ b/Lib/venv/__init__.py @@ -223,7 +223,7 @@ def symlink_or_copy(self, src, dst, relative_symlinks_ok=False): force_copy = not self.symlinks if not force_copy: try: - if not os.path.islink(dst): # can't link to itself! + if not os.path.islink(dst): # can't link to itself! 
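The unquote rework above changes internals only (a bytearray-based `_unquote_impl` plus a generator for mixed input); the observable behavior should be unchanged. For reference:

from urllib.parse import unquote, unquote_to_bytes

assert unquote('abc%20def') == 'abc def'
assert unquote('caf%C3%A9 café') == 'café café'   # escaped and literal non-ASCII
assert unquote_to_bytes('abc%20def') == b'abc def'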
if relative_symlinks_ok: assert os.path.dirname(src) == os.path.dirname(dst) os.symlink(os.path.basename(src), dst) @@ -418,11 +418,11 @@ def install_scripts(self, context, path): binpath = context.bin_path plen = len(path) for root, dirs, files in os.walk(path): - if root == path: # at top-level, remove irrelevant dirs + if root == path: # at top-level, remove irrelevant dirs for d in dirs[:]: if d not in ('common', os.name): dirs.remove(d) - continue # ignore files in top level + continue # ignore files in top level for f in files: if (os.name == 'nt' and f.startswith('python') and f.endswith(('.exe', '.pdb'))): @@ -468,83 +468,76 @@ def create(env_dir, system_site_packages=False, clear=False, prompt=prompt, upgrade_deps=upgrade_deps) builder.create(env_dir) + def main(args=None): - compatible = True - if sys.version_info < (3, 3): - compatible = False - elif not hasattr(sys, 'base_prefix'): - compatible = False - if not compatible: - raise ValueError('This script is only for use with Python >= 3.3') + import argparse + + parser = argparse.ArgumentParser(prog=__name__, + description='Creates virtual Python ' + 'environments in one or ' + 'more target ' + 'directories.', + epilog='Once an environment has been ' + 'created, you may wish to ' + 'activate it, e.g. by ' + 'sourcing an activate script ' + 'in its bin directory.') + parser.add_argument('dirs', metavar='ENV_DIR', nargs='+', + help='A directory to create the environment in.') + parser.add_argument('--system-site-packages', default=False, + action='store_true', dest='system_site', + help='Give the virtual environment access to the ' + 'system site-packages dir.') + if os.name == 'nt': + use_symlinks = False else: - import argparse - - parser = argparse.ArgumentParser(prog=__name__, - description='Creates virtual Python ' - 'environments in one or ' - 'more target ' - 'directories.', - epilog='Once an environment has been ' - 'created, you may wish to ' - 'activate it, e.g. 
by ' - 'sourcing an activate script ' - 'in its bin directory.') - parser.add_argument('dirs', metavar='ENV_DIR', nargs='+', - help='A directory to create the environment in.') - parser.add_argument('--system-site-packages', default=False, - action='store_true', dest='system_site', - help='Give the virtual environment access to the ' - 'system site-packages dir.') - if os.name == 'nt': - use_symlinks = False - else: - use_symlinks = True - group = parser.add_mutually_exclusive_group() - group.add_argument('--symlinks', default=use_symlinks, - action='store_true', dest='symlinks', - help='Try to use symlinks rather than copies, ' - 'when symlinks are not the default for ' - 'the platform.') - group.add_argument('--copies', default=not use_symlinks, - action='store_false', dest='symlinks', - help='Try to use copies rather than symlinks, ' - 'even when symlinks are the default for ' - 'the platform.') - parser.add_argument('--clear', default=False, action='store_true', - dest='clear', help='Delete the contents of the ' - 'environment directory if it ' - 'already exists, before ' - 'environment creation.') - parser.add_argument('--upgrade', default=False, action='store_true', - dest='upgrade', help='Upgrade the environment ' - 'directory to use this version ' - 'of Python, assuming Python ' - 'has been upgraded in-place.') - parser.add_argument('--without-pip', dest='with_pip', - default=True, action='store_false', - help='Skips installing or upgrading pip in the ' - 'virtual environment (pip is bootstrapped ' - 'by default)') - parser.add_argument('--prompt', - help='Provides an alternative prompt prefix for ' - 'this environment.') - parser.add_argument('--upgrade-deps', default=False, action='store_true', - dest='upgrade_deps', - help='Upgrade core dependencies: {} to the latest ' - 'version in PyPI'.format( - ' '.join(CORE_VENV_DEPS))) - options = parser.parse_args(args) - if options.upgrade and options.clear: - raise ValueError('you cannot supply --upgrade and --clear together.') - builder = EnvBuilder(system_site_packages=options.system_site, - clear=options.clear, - symlinks=options.symlinks, - upgrade=options.upgrade, - with_pip=options.with_pip, - prompt=options.prompt, - upgrade_deps=options.upgrade_deps) - for d in options.dirs: - builder.create(d) + use_symlinks = True + group = parser.add_mutually_exclusive_group() + group.add_argument('--symlinks', default=use_symlinks, + action='store_true', dest='symlinks', + help='Try to use symlinks rather than copies, ' + 'when symlinks are not the default for ' + 'the platform.') + group.add_argument('--copies', default=not use_symlinks, + action='store_false', dest='symlinks', + help='Try to use copies rather than symlinks, ' + 'even when symlinks are the default for ' + 'the platform.') + parser.add_argument('--clear', default=False, action='store_true', + dest='clear', help='Delete the contents of the ' + 'environment directory if it ' + 'already exists, before ' + 'environment creation.') + parser.add_argument('--upgrade', default=False, action='store_true', + dest='upgrade', help='Upgrade the environment ' + 'directory to use this version ' + 'of Python, assuming Python ' + 'has been upgraded in-place.') + parser.add_argument('--without-pip', dest='with_pip', + default=True, action='store_false', + help='Skips installing or upgrading pip in the ' + 'virtual environment (pip is bootstrapped ' + 'by default)') + parser.add_argument('--prompt', + help='Provides an alternative prompt prefix for ' + 'this environment.') + 
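The flattened main() above is argparse plumbing around EnvBuilder; a rough programmatic equivalent (the target directory name is illustrative):

import os
import venv

builder = venv.EnvBuilder(
    system_site_packages=False,
    clear=False,
    symlinks=(os.name != 'nt'),   # mirrors the platform default chosen above
    upgrade=False,
    with_pip=True,
    prompt=None,
    upgrade_deps=True,            # same effect as passing --upgrade-deps
)
builder.create('demo-env')        # hypothetical ENV_DIR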
parser.add_argument('--upgrade-deps', default=False, action='store_true', + dest='upgrade_deps', + help=f'Upgrade core dependencies: {" ".join(CORE_VENV_DEPS)} ' + 'to the latest version in PyPI') + options = parser.parse_args(args) + if options.upgrade and options.clear: + raise ValueError('you cannot supply --upgrade and --clear together.') + builder = EnvBuilder(system_site_packages=options.system_site, + clear=options.clear, + symlinks=options.symlinks, + upgrade=options.upgrade, + with_pip=options.with_pip, + prompt=options.prompt, + upgrade_deps=options.upgrade_deps) + for d in options.dirs: + builder.create(d) + if __name__ == '__main__': rc = 1 diff --git a/Lib/wsgiref/handlers.py b/Lib/wsgiref/handlers.py index cd0916dc5553fb..cafe872c7aae9b 100644 --- a/Lib/wsgiref/handlers.py +++ b/Lib/wsgiref/handlers.py @@ -475,10 +475,7 @@ def _write(self,data): from warnings import warn warn("SimpleHandler.stdout.write() should not do partial writes", DeprecationWarning) - while True: - data = data[result:] - if not data: - break + while data := data[result:]: result = self.stdout.write(data) def _flush(self): diff --git a/Lib/wsgiref/validate.py b/Lib/wsgiref/validate.py index 6044e320a474c6..1a1853cd63a0d2 100644 --- a/Lib/wsgiref/validate.py +++ b/Lib/wsgiref/validate.py @@ -214,10 +214,7 @@ def readlines(self, *args): return lines def __iter__(self): - while 1: - line = self.readline() - if not line: - return + while line := self.readline(): yield line def close(self): diff --git a/Lib/xdrlib.py b/Lib/xdrlib.py index b56ffa59b73dcb..f8c2c18228da4d 100644 --- a/Lib/xdrlib.py +++ b/Lib/xdrlib.py @@ -224,9 +224,7 @@ def unpack_string(self): def unpack_list(self, unpack_item): list = [] - while 1: - x = self.unpack_uint() - if x == 0: break + while (x := self.unpack_uint()) != 0: if x != 1: raise ConversionError('0 or 1 expected, got %r' % (x,)) item = unpack_item() diff --git a/Lib/xml/dom/expatbuilder.py b/Lib/xml/dom/expatbuilder.py index 199c22d0af347e..7dd667bf3fbe04 100644 --- a/Lib/xml/dom/expatbuilder.py +++ b/Lib/xml/dom/expatbuilder.py @@ -200,10 +200,7 @@ def parseFile(self, file): parser = self.getParser() first_buffer = True try: - while 1: - buffer = file.read(16*1024) - if not buffer: - break + while buffer := file.read(16*1024): parser.Parse(buffer, False) if first_buffer and self.document.documentElement: self._setup_subset(buffer) diff --git a/Lib/xml/etree/ElementTree.py b/Lib/xml/etree/ElementTree.py index ebbe2b703bfd8f..df5d5191126ae1 100644 --- a/Lib/xml/etree/ElementTree.py +++ b/Lib/xml/etree/ElementTree.py @@ -566,10 +566,7 @@ def parse(self, source, parser=None): # it with chunks. 
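The read loops rewritten in this patch (urllib.request, wsgiref, xdrlib, the XML parsers, xmlrpc) all share one idiom, an assignment expression in the while condition. A self-contained sketch of the pattern:

import io

def copy_chunks(src, dst, bufsize=64 * 1024):
    """Copy src to dst until read() returns an empty chunk."""
    while chunk := src.read(bufsize):
        dst.write(chunk)

dst = io.BytesIO()
copy_chunks(io.BytesIO(b"example payload"), dst)
assert dst.getvalue() == b"example payload"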
self._root = parser._parse_whole(source) return self._root - while True: - data = source.read(65536) - if not data: - break + while data := source.read(65536): parser.feed(data) self._root = parser.close() return self._root diff --git a/Lib/xml/sax/xmlreader.py b/Lib/xml/sax/xmlreader.py index 716f22840414e6..e906121d23b9ef 100644 --- a/Lib/xml/sax/xmlreader.py +++ b/Lib/xml/sax/xmlreader.py @@ -120,10 +120,8 @@ def parse(self, source): file = source.getCharacterStream() if file is None: file = source.getByteStream() - buffer = file.read(self._bufsize) - while buffer: + while buffer := file.read(self._bufsize): self.feed(buffer) - buffer = file.read(self._bufsize) self.close() def feed(self, data): diff --git a/Lib/xmlrpc/client.py b/Lib/xmlrpc/client.py index bef23f4505e03c..ea8da766cb5a7e 100644 --- a/Lib/xmlrpc/client.py +++ b/Lib/xmlrpc/client.py @@ -1339,10 +1339,7 @@ def parse_response(self, response): p, u = self.getparser() - while 1: - data = stream.read(1024) - if not data: - break + while data := stream.read(1024): if self.verbose: print("body:", repr(data)) p.feed(data) diff --git a/Lib/xmlrpc/server.py b/Lib/xmlrpc/server.py index 0c4b558045a9f4..4dddb1d10e08bd 100644 --- a/Lib/xmlrpc/server.py +++ b/Lib/xmlrpc/server.py @@ -720,9 +720,7 @@ def markup(self, text, escape=None, funcs={}, classes={}, methods={}): r'RFC[- ]?(\d+)|' r'PEP[- ]?(\d+)|' r'(self\.)?((?:\w|\.)+))\b') - while 1: - match = pattern.search(text, here) - if not match: break + while match := pattern.search(text, here): start, end = match.span() results.append(escape(text[here:start])) diff --git a/Lib/zipfile.py b/Lib/zipfile/__init__.py similarity index 88% rename from Lib/zipfile.py rename to Lib/zipfile/__init__.py index 77b643caf9fc91..e1833dd1772d56 100644 --- a/Lib/zipfile.py +++ b/Lib/zipfile/__init__.py @@ -6,17 +6,13 @@ import binascii import importlib.util import io -import itertools import os -import posixpath import shutil import stat import struct import sys import threading import time -import contextlib -import pathlib try: import zlib # We may need its compression method @@ -1206,10 +1202,10 @@ def close(self): if not self._zip64: if self._file_size > ZIP64_LIMIT: raise RuntimeError( - 'File size unexpectedly exceeded ZIP64 limit') + 'File size too large, try using force_zip64') if self._compress_size > ZIP64_LIMIT: raise RuntimeError( - 'Compressed size unexpectedly exceeded ZIP64 limit') + 'Compressed size too large, try using force_zip64') # Seek backwards and write file header (which will now include # correct CRC and file sizes) @@ -2186,381 +2182,12 @@ def _compile(file, optimize=-1): return (fname, archivename) -def _parents(path): - """ - Given a path with elements separated by - posixpath.sep, generate all parents of that path. 
- - >>> list(_parents('b/d')) - ['b'] - >>> list(_parents('/b/d/')) - ['/b'] - >>> list(_parents('b/d/f/')) - ['b/d', 'b'] - >>> list(_parents('b')) - [] - >>> list(_parents('')) - [] - """ - return itertools.islice(_ancestry(path), 1, None) - - -def _ancestry(path): - """ - Given a path with elements separated by - posixpath.sep, generate all elements of that path - - >>> list(_ancestry('b/d')) - ['b/d', 'b'] - >>> list(_ancestry('/b/d/')) - ['/b/d', '/b'] - >>> list(_ancestry('b/d/f/')) - ['b/d/f', 'b/d', 'b'] - >>> list(_ancestry('b')) - ['b'] - >>> list(_ancestry('')) - [] - """ - path = path.rstrip(posixpath.sep) - while path and path != posixpath.sep: - yield path - path, tail = posixpath.split(path) - - -_dedupe = dict.fromkeys -"""Deduplicate an iterable in original order""" - - -def _difference(minuend, subtrahend): - """ - Return items in minuend not in subtrahend, retaining order - with O(1) lookup. - """ - return itertools.filterfalse(set(subtrahend).__contains__, minuend) - - -class CompleteDirs(ZipFile): - """ - A ZipFile subclass that ensures that implied directories - are always included in the namelist. - """ - - @staticmethod - def _implied_dirs(names): - parents = itertools.chain.from_iterable(map(_parents, names)) - as_dirs = (p + posixpath.sep for p in parents) - return _dedupe(_difference(as_dirs, names)) - - def namelist(self): - names = super(CompleteDirs, self).namelist() - return names + list(self._implied_dirs(names)) - - def _name_set(self): - return set(self.namelist()) - - def resolve_dir(self, name): - """ - If the name represents a directory, return that name - as a directory (with the trailing slash). - """ - names = self._name_set() - dirname = name + '/' - dir_match = name not in names and dirname in names - return dirname if dir_match else name - - @classmethod - def make(cls, source): - """ - Given a source (filename or zipfile), return an - appropriate CompleteDirs subclass. - """ - if isinstance(source, CompleteDirs): - return source - - if not isinstance(source, ZipFile): - return cls(source) - - # Only allow for FastLookup when supplied zipfile is read-only - if 'r' not in source.mode: - cls = CompleteDirs - - source.__class__ = cls - return source - - -class FastLookup(CompleteDirs): - """ - ZipFile subclass to ensure implicit - dirs exist and are resolved rapidly. - """ - - def namelist(self): - with contextlib.suppress(AttributeError): - return self.__names - self.__names = super(FastLookup, self).namelist() - return self.__names - - def _name_set(self): - with contextlib.suppress(AttributeError): - return self.__lookup - self.__lookup = super(FastLookup, self)._name_set() - return self.__lookup - - -class Path: - """ - A pathlib-compatible interface for zip files. - - Consider a zip file with this structure:: - - . - ├── a.txt - └── b - ├── c.txt - └── d - └── e.txt - - >>> data = io.BytesIO() - >>> zf = ZipFile(data, 'w') - >>> zf.writestr('a.txt', 'content of a') - >>> zf.writestr('b/c.txt', 'content of c') - >>> zf.writestr('b/d/e.txt', 'content of e') - >>> zf.filename = 'mem/abcde.zip' - - Path accepts the zipfile object itself or a filename - - >>> root = Path(zf) - - From there, several path operations are available. 
- - Directory iteration (including the zip file itself): +from ._path import ( # noqa: E402 + Path, - >>> a, b = root.iterdir() - >>> a - Path('mem/abcde.zip', 'a.txt') - >>> b - Path('mem/abcde.zip', 'b/') + # used privately for tests + CompleteDirs, # noqa: F401 +) - name property: - - >>> b.name - 'b' - - join with divide operator: - - >>> c = b / 'c.txt' - >>> c - Path('mem/abcde.zip', 'b/c.txt') - >>> c.name - 'c.txt' - - Read text: - - >>> c.read_text() - 'content of c' - - existence: - - >>> c.exists() - True - >>> (b / 'missing.txt').exists() - False - - Coercion to string: - - >>> import os - >>> str(c).replace(os.sep, posixpath.sep) - 'mem/abcde.zip/b/c.txt' - - At the root, ``name``, ``filename``, and ``parent`` - resolve to the zipfile. Note these attributes are not - valid and will raise a ``ValueError`` if the zipfile - has no filename. - - >>> root.name - 'abcde.zip' - >>> str(root.filename).replace(os.sep, posixpath.sep) - 'mem/abcde.zip' - >>> str(root.parent) - 'mem' - """ - - __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})" - - def __init__(self, root, at=""): - """ - Construct a Path from a ZipFile or filename. - - Note: When the source is an existing ZipFile object, - its type (__class__) will be mutated to a - specialized type. If the caller wishes to retain the - original type, the caller should either create a - separate ZipFile object or pass a filename. - """ - self.root = FastLookup.make(root) - self.at = at - - def open(self, mode='r', *args, pwd=None, **kwargs): - """ - Open this entry as text or binary following the semantics - of ``pathlib.Path.open()`` by passing arguments through - to io.TextIOWrapper(). - """ - if self.is_dir(): - raise IsADirectoryError(self) - zip_mode = mode[0] - if not self.exists() and zip_mode == 'r': - raise FileNotFoundError(self) - stream = self.root.open(self.at, zip_mode, pwd=pwd) - if 'b' in mode: - if args or kwargs: - raise ValueError("encoding args invalid for binary operation") - return stream - else: - kwargs["encoding"] = io.text_encoding(kwargs.get("encoding")) - return io.TextIOWrapper(stream, *args, **kwargs) - - @property - def name(self): - return pathlib.Path(self.at).name or self.filename.name - - @property - def suffix(self): - return pathlib.Path(self.at).suffix or self.filename.suffix - - @property - def suffixes(self): - return pathlib.Path(self.at).suffixes or self.filename.suffixes - - @property - def stem(self): - return pathlib.Path(self.at).stem or self.filename.stem - - @property - def filename(self): - return pathlib.Path(self.root.filename).joinpath(self.at) - - def read_text(self, *args, **kwargs): - kwargs["encoding"] = io.text_encoding(kwargs.get("encoding")) - with self.open('r', *args, **kwargs) as strm: - return strm.read() - - def read_bytes(self): - with self.open('rb') as strm: - return strm.read() - - def _is_child(self, path): - return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/") - - def _next(self, at): - return self.__class__(self.root, at) - - def is_dir(self): - return not self.at or self.at.endswith("/") - - def is_file(self): - return self.exists() and not self.is_dir() - - def exists(self): - return self.at in self.root._name_set() - - def iterdir(self): - if not self.is_dir(): - raise ValueError("Can't listdir a file") - subs = map(self._next, self.root.namelist()) - return filter(self._is_child, subs) - - def __str__(self): - return posixpath.join(self.root.filename, self.at) - - def __repr__(self): - return self.__repr.format(self=self) - - 
def joinpath(self, *other): - next = posixpath.join(self.at, *other) - return self._next(self.root.resolve_dir(next)) - - __truediv__ = joinpath - - @property - def parent(self): - if not self.at: - return self.filename.parent - parent_at = posixpath.dirname(self.at.rstrip('/')) - if parent_at: - parent_at += '/' - return self._next(parent_at) - - -def main(args=None): - import argparse - - description = 'A simple command-line interface for zipfile module.' - parser = argparse.ArgumentParser(description=description) - group = parser.add_mutually_exclusive_group(required=True) - group.add_argument('-l', '--list', metavar='<zipfile>', - help='Show listing of a zipfile') - group.add_argument('-e', '--extract', nargs=2, - metavar=('<zipfile>', '<output_dir>'), - help='Extract zipfile into target dir') - group.add_argument('-c', '--create', nargs='+', - metavar=('<name>', '<file>'), - help='Create zipfile from sources') - group.add_argument('-t', '--test', metavar='<zipfile>', - help='Test if a zipfile is valid') - parser.add_argument('--metadata-encoding', metavar='<encoding>', - help='Specify encoding of member names for -l, -e and -t') - args = parser.parse_args(args) - - encoding = args.metadata_encoding - - if args.test is not None: - src = args.test - with ZipFile(src, 'r', metadata_encoding=encoding) as zf: - badfile = zf.testzip() - if badfile: - print("The following enclosed file is corrupted: {!r}".format(badfile)) - print("Done testing") - - elif args.list is not None: - src = args.list - with ZipFile(src, 'r', metadata_encoding=encoding) as zf: - zf.printdir() - - elif args.extract is not None: - src, curdir = args.extract - with ZipFile(src, 'r', metadata_encoding=encoding) as zf: - zf.extractall(curdir) - - elif args.create is not None: - if encoding: - print("Non-conforming encodings not supported with -c.", - file=sys.stderr) - sys.exit(1) - - zip_name = args.create.pop(0) - files = args.create - - def addToZip(zf, path, zippath): - if os.path.isfile(path): - zf.write(path, zippath, ZIP_DEFLATED) - elif os.path.isdir(path): - if zippath: - zf.write(path, zippath) - for nm in sorted(os.listdir(path)): - addToZip(zf, - os.path.join(path, nm), os.path.join(zippath, nm)) - # else: ignore - - with ZipFile(zip_name, 'w') as zf: - for path in files: - zippath = os.path.basename(path) - if not zippath: - zippath = os.path.basename(os.path.dirname(path)) - if zippath in ('', os.curdir, os.pardir): - zippath = '' - addToZip(zf, path, zippath) - - -if __name__ == "__main__": - main() +# used privately for tests +from .__main__ import main # noqa: F401, E402 diff --git a/Lib/zipfile/__main__.py b/Lib/zipfile/__main__.py new file mode 100644 index 00000000000000..a9e5fb1b8d72c4 --- /dev/null +++ b/Lib/zipfile/__main__.py @@ -0,0 +1,77 @@ +import sys +import os +from . import ZipFile, ZIP_DEFLATED + + +def main(args=None): + import argparse + + description = 'A simple command-line interface for zipfile module.' 
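# For orientation, typical invocations of this CLI (archive and path names
# below are illustrative, not taken from the patch):
#   python -m zipfile -c archive.zip src/ README.md
#   python -m zipfile -l archive.zip
#   python -m zipfile -e archive.zip out/
#   python -m zipfile -t archive.zip
#   python -m zipfile -l archive.zip --metadata-encoding cp437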
+ parser = argparse.ArgumentParser(description=description) + group = parser.add_mutually_exclusive_group(required=True) + group.add_argument('-l', '--list', metavar='<zipfile>', + help='Show listing of a zipfile') + group.add_argument('-e', '--extract', nargs=2, + metavar=('<zipfile>', '<output_dir>'), + help='Extract zipfile into target dir') + group.add_argument('-c', '--create', nargs='+', + metavar=('<name>', '<file>'), + help='Create zipfile from sources') + group.add_argument('-t', '--test', metavar='<zipfile>', + help='Test if a zipfile is valid') + parser.add_argument('--metadata-encoding', metavar='<encoding>', + help='Specify encoding of member names for -l, -e and -t') + args = parser.parse_args(args) + + encoding = args.metadata_encoding + + if args.test is not None: + src = args.test + with ZipFile(src, 'r', metadata_encoding=encoding) as zf: + badfile = zf.testzip() + if badfile: + print("The following enclosed file is corrupted: {!r}".format(badfile)) + print("Done testing") + + elif args.list is not None: + src = args.list + with ZipFile(src, 'r', metadata_encoding=encoding) as zf: + zf.printdir() + + elif args.extract is not None: + src, curdir = args.extract + with ZipFile(src, 'r', metadata_encoding=encoding) as zf: + zf.extractall(curdir) + + elif args.create is not None: + if encoding: + print("Non-conforming encodings not supported with -c.", + file=sys.stderr) + sys.exit(1) + + zip_name = args.create.pop(0) + files = args.create + + def addToZip(zf, path, zippath): + if os.path.isfile(path): + zf.write(path, zippath, ZIP_DEFLATED) + elif os.path.isdir(path): + if zippath: + zf.write(path, zippath) + for nm in sorted(os.listdir(path)): + addToZip(zf, + os.path.join(path, nm), os.path.join(zippath, nm)) + # else: ignore + + with ZipFile(zip_name, 'w') as zf: + for path in files: + zippath = os.path.basename(path) + if not zippath: + zippath = os.path.basename(os.path.dirname(path)) + if zippath in ('', os.curdir, os.pardir): + zippath = '' + addToZip(zf, path, zippath) + + +if __name__ == "__main__": + main() diff --git a/Lib/zipfile/_path.py b/Lib/zipfile/_path.py new file mode 100644 index 00000000000000..aea17b65b6aa2d --- /dev/null +++ b/Lib/zipfile/_path.py @@ -0,0 +1,333 @@ +import io +import posixpath +import zipfile +import itertools +import contextlib +import pathlib + + +__all__ = ['Path'] + + +def _parents(path): + """ + Given a path with elements separated by + posixpath.sep, generate all parents of that path. + + >>> list(_parents('b/d')) + ['b'] + >>> list(_parents('/b/d/')) + ['/b'] + >>> list(_parents('b/d/f/')) + ['b/d', 'b'] + >>> list(_parents('b')) + [] + >>> list(_parents('')) + [] + """ + return itertools.islice(_ancestry(path), 1, None) + + +def _ancestry(path): + """ + Given a path with elements separated by + posixpath.sep, generate all elements of that path + + >>> list(_ancestry('b/d')) + ['b/d', 'b'] + >>> list(_ancestry('/b/d/')) + ['/b/d', '/b'] + >>> list(_ancestry('b/d/f/')) + ['b/d/f', 'b/d', 'b'] + >>> list(_ancestry('b')) + ['b'] + >>> list(_ancestry('')) + [] + """ + path = path.rstrip(posixpath.sep) + while path and path != posixpath.sep: + yield path + path, tail = posixpath.split(path) + + +_dedupe = dict.fromkeys +"""Deduplicate an iterable in original order""" + + +def _difference(minuend, subtrahend): + """ + Return items in minuend not in subtrahend, retaining order + with O(1) lookup. 
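A doctest-style illustration, consistent with the helpers above:

>>> list(_difference(['a/', 'b/', 'c/'], {'b/'}))
['a/', 'c/']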
+ """ + return itertools.filterfalse(set(subtrahend).__contains__, minuend) + + +class InitializedState: + """ + Mix-in to save the initialization state for pickling. + """ + + def __init__(self, *args, **kwargs): + self.__args = args + self.__kwargs = kwargs + super().__init__(*args, **kwargs) + + def __getstate__(self): + return self.__args, self.__kwargs + + def __setstate__(self, state): + args, kwargs = state + super().__init__(*args, **kwargs) + + +class CompleteDirs(InitializedState, zipfile.ZipFile): + """ + A ZipFile subclass that ensures that implied directories + are always included in the namelist. + """ + + @staticmethod + def _implied_dirs(names): + parents = itertools.chain.from_iterable(map(_parents, names)) + as_dirs = (p + posixpath.sep for p in parents) + return _dedupe(_difference(as_dirs, names)) + + def namelist(self): + names = super(CompleteDirs, self).namelist() + return names + list(self._implied_dirs(names)) + + def _name_set(self): + return set(self.namelist()) + + def resolve_dir(self, name): + """ + If the name represents a directory, return that name + as a directory (with the trailing slash). + """ + names = self._name_set() + dirname = name + '/' + dir_match = name not in names and dirname in names + return dirname if dir_match else name + + @classmethod + def make(cls, source): + """ + Given a source (filename or zipfile), return an + appropriate CompleteDirs subclass. + """ + if isinstance(source, CompleteDirs): + return source + + if not isinstance(source, zipfile.ZipFile): + return cls(source) + + # Only allow for FastLookup when supplied zipfile is read-only + if 'r' not in source.mode: + cls = CompleteDirs + + source.__class__ = cls + return source + + +class FastLookup(CompleteDirs): + """ + ZipFile subclass to ensure implicit + dirs exist and are resolved rapidly. + """ + + def namelist(self): + with contextlib.suppress(AttributeError): + return self.__names + self.__names = super(FastLookup, self).namelist() + return self.__names + + def _name_set(self): + with contextlib.suppress(AttributeError): + return self.__lookup + self.__lookup = super(FastLookup, self)._name_set() + return self.__lookup + + +class Path: + """ + A pathlib-compatible interface for zip files. + + Consider a zip file with this structure:: + + . + ├── a.txt + └── b + ├── c.txt + └── d + └── e.txt + + >>> data = io.BytesIO() + >>> zf = ZipFile(data, 'w') + >>> zf.writestr('a.txt', 'content of a') + >>> zf.writestr('b/c.txt', 'content of c') + >>> zf.writestr('b/d/e.txt', 'content of e') + >>> zf.filename = 'mem/abcde.zip' + + Path accepts the zipfile object itself or a filename + + >>> root = Path(zf) + + From there, several path operations are available. + + Directory iteration (including the zip file itself): + + >>> a, b = root.iterdir() + >>> a + Path('mem/abcde.zip', 'a.txt') + >>> b + Path('mem/abcde.zip', 'b/') + + name property: + + >>> b.name + 'b' + + join with divide operator: + + >>> c = b / 'c.txt' + >>> c + Path('mem/abcde.zip', 'b/c.txt') + >>> c.name + 'c.txt' + + Read text: + + >>> c.read_text() + 'content of c' + + existence: + + >>> c.exists() + True + >>> (b / 'missing.txt').exists() + False + + Coercion to string: + + >>> import os + >>> str(c).replace(os.sep, posixpath.sep) + 'mem/abcde.zip/b/c.txt' + + At the root, ``name``, ``filename``, and ``parent`` + resolve to the zipfile. Note these attributes are not + valid and will raise a ``ValueError`` if the zipfile + has no filename. 
+ + >>> root.name + 'abcde.zip' + >>> str(root.filename).replace(os.sep, posixpath.sep) + 'mem/abcde.zip' + >>> str(root.parent) + 'mem' + """ + + __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})" + + def __init__(self, root, at=""): + """ + Construct a Path from a ZipFile or filename. + + Note: When the source is an existing ZipFile object, + its type (__class__) will be mutated to a + specialized type. If the caller wishes to retain the + original type, the caller should either create a + separate ZipFile object or pass a filename. + """ + self.root = FastLookup.make(root) + self.at = at + + def open(self, mode='r', *args, pwd=None, **kwargs): + """ + Open this entry as text or binary following the semantics + of ``pathlib.Path.open()`` by passing arguments through + to io.TextIOWrapper(). + """ + if self.is_dir(): + raise IsADirectoryError(self) + zip_mode = mode[0] + if not self.exists() and zip_mode == 'r': + raise FileNotFoundError(self) + stream = self.root.open(self.at, zip_mode, pwd=pwd) + if 'b' in mode: + if args or kwargs: + raise ValueError("encoding args invalid for binary operation") + return stream + else: + kwargs["encoding"] = io.text_encoding(kwargs.get("encoding")) + return io.TextIOWrapper(stream, *args, **kwargs) + + @property + def name(self): + return pathlib.Path(self.at).name or self.filename.name + + @property + def suffix(self): + return pathlib.Path(self.at).suffix or self.filename.suffix + + @property + def suffixes(self): + return pathlib.Path(self.at).suffixes or self.filename.suffixes + + @property + def stem(self): + return pathlib.Path(self.at).stem or self.filename.stem + + @property + def filename(self): + return pathlib.Path(self.root.filename).joinpath(self.at) + + def read_text(self, *args, **kwargs): + kwargs["encoding"] = io.text_encoding(kwargs.get("encoding")) + with self.open('r', *args, **kwargs) as strm: + return strm.read() + + def read_bytes(self): + with self.open('rb') as strm: + return strm.read() + + def _is_child(self, path): + return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/") + + def _next(self, at): + return self.__class__(self.root, at) + + def is_dir(self): + return not self.at or self.at.endswith("/") + + def is_file(self): + return self.exists() and not self.is_dir() + + def exists(self): + return self.at in self.root._name_set() + + def iterdir(self): + if not self.is_dir(): + raise ValueError("Can't listdir a file") + subs = map(self._next, self.root.namelist()) + return filter(self._is_child, subs) + + def __str__(self): + return posixpath.join(self.root.filename, self.at) + + def __repr__(self): + return self.__repr.format(self=self) + + def joinpath(self, *other): + next = posixpath.join(self.at, *other) + return self._next(self.root.resolve_dir(next)) + + __truediv__ = joinpath + + @property + def parent(self): + if not self.at: + return self.filename.parent + parent_at = posixpath.dirname(self.at.rstrip('/')) + if parent_at: + parent_at += '/' + return self._next(parent_at) diff --git a/Lib/zipimport.py b/Lib/zipimport.py index 016f1b8a797988..a7333a4c4906de 100644 --- a/Lib/zipimport.py +++ b/Lib/zipimport.py @@ -347,114 +347,121 @@ def _read_directory(archive): raise ZipImportError(f"can't open Zip file: {archive!r}", path=archive) with fp: + # GH-87235: On macOS all file descriptors for /dev/fd/N share the same + # file offset, reset the file offset after scanning the zipfile diretory + # to not cause problems when some runs 'python3 /dev/fd/9 9<some_script' + start_offset = 
fp.tell() try: - fp.seek(-END_CENTRAL_DIR_SIZE, 2) - header_position = fp.tell() - buffer = fp.read(END_CENTRAL_DIR_SIZE) - except OSError: - raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive) - if len(buffer) != END_CENTRAL_DIR_SIZE: - raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive) - if buffer[:4] != STRING_END_ARCHIVE: - # Bad: End of Central Dir signature - # Check if there's a comment. try: - fp.seek(0, 2) - file_size = fp.tell() - except OSError: - raise ZipImportError(f"can't read Zip file: {archive!r}", - path=archive) - max_comment_start = max(file_size - MAX_COMMENT_LEN - - END_CENTRAL_DIR_SIZE, 0) - try: - fp.seek(max_comment_start) - data = fp.read() - except OSError: - raise ZipImportError(f"can't read Zip file: {archive!r}", - path=archive) - pos = data.rfind(STRING_END_ARCHIVE) - if pos < 0: - raise ZipImportError(f'not a Zip file: {archive!r}', - path=archive) - buffer = data[pos:pos+END_CENTRAL_DIR_SIZE] - if len(buffer) != END_CENTRAL_DIR_SIZE: - raise ZipImportError(f"corrupt Zip file: {archive!r}", - path=archive) - header_position = file_size - len(data) + pos - - header_size = _unpack_uint32(buffer[12:16]) - header_offset = _unpack_uint32(buffer[16:20]) - if header_position < header_size: - raise ZipImportError(f'bad central directory size: {archive!r}', path=archive) - if header_position < header_offset: - raise ZipImportError(f'bad central directory offset: {archive!r}', path=archive) - header_position -= header_size - arc_offset = header_position - header_offset - if arc_offset < 0: - raise ZipImportError(f'bad central directory size or offset: {archive!r}', path=archive) - - files = {} - # Start of Central Directory - count = 0 - try: - fp.seek(header_position) - except OSError: - raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive) - while True: - buffer = fp.read(46) - if len(buffer) < 4: - raise EOFError('EOF read where not expected') - # Start of file header - if buffer[:4] != b'PK\x01\x02': - break # Bad: Central Dir File Header - if len(buffer) != 46: - raise EOFError('EOF read where not expected') - flags = _unpack_uint16(buffer[8:10]) - compress = _unpack_uint16(buffer[10:12]) - time = _unpack_uint16(buffer[12:14]) - date = _unpack_uint16(buffer[14:16]) - crc = _unpack_uint32(buffer[16:20]) - data_size = _unpack_uint32(buffer[20:24]) - file_size = _unpack_uint32(buffer[24:28]) - name_size = _unpack_uint16(buffer[28:30]) - extra_size = _unpack_uint16(buffer[30:32]) - comment_size = _unpack_uint16(buffer[32:34]) - file_offset = _unpack_uint32(buffer[42:46]) - header_size = name_size + extra_size + comment_size - if file_offset > header_offset: - raise ZipImportError(f'bad local header offset: {archive!r}', path=archive) - file_offset += arc_offset - - try: - name = fp.read(name_size) + fp.seek(-END_CENTRAL_DIR_SIZE, 2) + header_position = fp.tell() + buffer = fp.read(END_CENTRAL_DIR_SIZE) except OSError: raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive) - if len(name) != name_size: + if len(buffer) != END_CENTRAL_DIR_SIZE: raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive) - # On Windows, calling fseek to skip over the fields we don't use is - # slower than reading the data because fseek flushes stdio's - # internal buffers. See issue #8745. + if buffer[:4] != STRING_END_ARCHIVE: + # Bad: End of Central Dir signature + # Check if there's a comment. 
+ try: + fp.seek(0, 2) + file_size = fp.tell() + except OSError: + raise ZipImportError(f"can't read Zip file: {archive!r}", + path=archive) + max_comment_start = max(file_size - MAX_COMMENT_LEN - + END_CENTRAL_DIR_SIZE, 0) + try: + fp.seek(max_comment_start) + data = fp.read() + except OSError: + raise ZipImportError(f"can't read Zip file: {archive!r}", + path=archive) + pos = data.rfind(STRING_END_ARCHIVE) + if pos < 0: + raise ZipImportError(f'not a Zip file: {archive!r}', + path=archive) + buffer = data[pos:pos+END_CENTRAL_DIR_SIZE] + if len(buffer) != END_CENTRAL_DIR_SIZE: + raise ZipImportError(f"corrupt Zip file: {archive!r}", + path=archive) + header_position = file_size - len(data) + pos + + header_size = _unpack_uint32(buffer[12:16]) + header_offset = _unpack_uint32(buffer[16:20]) + if header_position < header_size: + raise ZipImportError(f'bad central directory size: {archive!r}', path=archive) + if header_position < header_offset: + raise ZipImportError(f'bad central directory offset: {archive!r}', path=archive) + header_position -= header_size + arc_offset = header_position - header_offset + if arc_offset < 0: + raise ZipImportError(f'bad central directory size or offset: {archive!r}', path=archive) + + files = {} + # Start of Central Directory + count = 0 try: - if len(fp.read(header_size - name_size)) != header_size - name_size: - raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive) + fp.seek(header_position) except OSError: raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive) + while True: + buffer = fp.read(46) + if len(buffer) < 4: + raise EOFError('EOF read where not expected') + # Start of file header + if buffer[:4] != b'PK\x01\x02': + break # Bad: Central Dir File Header + if len(buffer) != 46: + raise EOFError('EOF read where not expected') + flags = _unpack_uint16(buffer[8:10]) + compress = _unpack_uint16(buffer[10:12]) + time = _unpack_uint16(buffer[12:14]) + date = _unpack_uint16(buffer[14:16]) + crc = _unpack_uint32(buffer[16:20]) + data_size = _unpack_uint32(buffer[20:24]) + file_size = _unpack_uint32(buffer[24:28]) + name_size = _unpack_uint16(buffer[28:30]) + extra_size = _unpack_uint16(buffer[30:32]) + comment_size = _unpack_uint16(buffer[32:34]) + file_offset = _unpack_uint32(buffer[42:46]) + header_size = name_size + extra_size + comment_size + if file_offset > header_offset: + raise ZipImportError(f'bad local header offset: {archive!r}', path=archive) + file_offset += arc_offset - if flags & 0x800: - # UTF-8 file names extension - name = name.decode() - else: - # Historical ZIP filename encoding try: - name = name.decode('ascii') - except UnicodeDecodeError: - name = name.decode('latin1').translate(cp437_table) - - name = name.replace('/', path_sep) - path = _bootstrap_external._path_join(archive, name) - t = (path, compress, data_size, file_size, file_offset, time, date, crc) - files[name] = t - count += 1 + name = fp.read(name_size) + except OSError: + raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive) + if len(name) != name_size: + raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive) + # On Windows, calling fseek to skip over the fields we don't use is + # slower than reading the data because fseek flushes stdio's + # internal buffers. See issue #8745. 
+ try: + if len(fp.read(header_size - name_size)) != header_size - name_size: + raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive) + except OSError: + raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive) + + if flags & 0x800: + # UTF-8 file names extension + name = name.decode() + else: + # Historical ZIP filename encoding + try: + name = name.decode('ascii') + except UnicodeDecodeError: + name = name.decode('latin1').translate(cp437_table) + + name = name.replace('/', path_sep) + path = _bootstrap_external._path_join(archive, name) + t = (path, compress, data_size, file_size, file_offset, time, date, crc) + files[name] = t + count += 1 + finally: + fp.seek(start_offset) _bootstrap._verbose_message('zipimport: found {} names in {!r}', count, archive) return files diff --git a/Mac/Extras.install.py b/Mac/Extras.install.py index ab1af71dfd823f..41bfa53d616307 100644 --- a/Mac/Extras.install.py +++ b/Mac/Extras.install.py @@ -9,10 +9,9 @@ debug = 0 def isclean(name): - if name == 'CVS': return 0 - if name == '.cvsignore': return 0 - if name == '.DS_store': return 0 - if name == '.svn': return 0 + if name in ('CVS', '.cvsignore', '.svn'): + return 0 + if name.lower() == '.ds_store': return 0 if name.endswith('~'): return 0 if name.endswith('.BAK'): return 0 if name.endswith('.pyc'): return 0 diff --git a/Mac/README.rst b/Mac/README.rst index 7476639d0ff541..bc40b41f7f38ad 100644 --- a/Mac/README.rst +++ b/Mac/README.rst @@ -10,6 +10,19 @@ Python on macOS README This document provides a quick overview of some macOS specific features in the Python distribution. +Compilers for building on macOS +=============================== + +The core developers primarily test builds on macOS with Apple's compiler tools, +either Xcode or the Command Line Tools. For these we only support building with +a compiler that includes an SDK that targets the OS on the build machine, that is +the version of Xcode that shipped with the OS version or one newer. + +For example, for macOS 12 we support Xcode 13 and Xcode 14 (or the corresponding +Command Line Tools). + +Building with other compilers, such as GCC, likely works, but is not actively supported. 
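Returning to the ``Lib/zipfile/_path.py`` code above: the ``InitializedState`` mix-in records the constructor arguments so that the ``ZipFile`` subclasses used by ``zipfile.Path`` can be pickled whenever those arguments are themselves pickleable (the behaviour described by the gh-98108 news entry later in this patch). A minimal round-trip sketch, assuming a hypothetical on-disk archive name (an in-memory ``io.BytesIO`` source would not survive pickling)::

    import pickle
    import zipfile

    # Hypothetical archive, created only for illustration.
    with zipfile.ZipFile("example.zip", "w") as zf:
        zf.writestr("b/c.txt", "content of c")

    p = zipfile.Path("example.zip", at="b/c.txt")

    # __getstate__ returns the saved (args, kwargs); __setstate__ re-runs
    # ZipFile.__init__ with them, so the unpickled Path reopens the archive.
    q = pickle.loads(pickle.dumps(p))
    assert q.read_text() == "content of c"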
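The ``zipimport._read_directory()`` hunk above follows a similar defensive shape: the whole central-directory scan now runs inside ``try``/``finally`` so the file offset seen by the caller is restored even when parsing fails, which matters on macOS where every ``/dev/fd/N`` descriptor shares one offset (GH-87235). A stripped-down sketch of the pattern, with the real parsing elided::

    def read_directory(fp):
        start_offset = fp.tell()      # remember where the caller left the stream
        try:
            fp.seek(0, 2)             # scan backwards from the end of the archive
            files = {}                # ... locate and parse the central directory ...
            return files
        finally:
            fp.seek(start_offset)     # always put the (possibly shared) offset back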
+ macOS specific arguments to configure ===================================== diff --git a/Makefile.pre.in b/Makefile.pre.in index 9b35398c4fe767..dd6c3fbd1c6483 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -38,7 +38,6 @@ CC= @CC@ CXX= @CXX@ LINKCC= @LINKCC@ AR= @AR@ -READELF= @READELF@ SOABI= @SOABI@ LDVERSION= @LDVERSION@ LIBPYTHON= @LIBPYTHON@ @@ -977,7 +976,8 @@ Programs/_testembed: Programs/_testembed.o $(LINK_PYTHON_DEPS) BOOTSTRAP_HEADERS = \ Python/frozen_modules/importlib._bootstrap.h \ - Python/frozen_modules/importlib._bootstrap_external.h + Python/frozen_modules/importlib._bootstrap_external.h \ + Python/frozen_modules/zipimport.h Programs/_bootstrap_python.o: Programs/_bootstrap_python.c $(BOOTSTRAP_HEADERS) $(PYTHON_HEADERS) @@ -1225,7 +1225,7 @@ regen-limited-abi: all ############################################################################ # Regenerate all generated files -regen-all: regen-opcode regen-opcode-targets regen-typeslots \ +regen-all: regen-cases regen-opcode regen-opcode-targets regen-typeslots \ regen-token regen-ast regen-keyword regen-sre regen-frozen clinic \ regen-pegen-metaparser regen-pegen regen-test-frozenmain \ regen-test-levenshtein regen-global-objects @@ -1445,7 +1445,19 @@ regen-opcode-targets: $(srcdir)/Python/opcode_targets.h.new $(UPDATE_FILE) $(srcdir)/Python/opcode_targets.h $(srcdir)/Python/opcode_targets.h.new -Python/ceval.o: $(srcdir)/Python/opcode_targets.h $(srcdir)/Python/condvar.h +.PHONY: regen-cases +regen-cases: + # Regenerate Python/generated_cases.c.h from Python/bytecodes.c + # using Tools/cases_generator/generate_cases.py + PYTHONPATH=$(srcdir)/Tools/cases_generator \ + $(PYTHON_FOR_REGEN) \ + $(srcdir)/Tools/cases_generator/generate_cases.py \ + -i $(srcdir)/Python/bytecodes.c \ + -o $(srcdir)/Python/generated_cases.c.h.new + $(UPDATE_FILE) $(srcdir)/Python/generated_cases.c.h $(srcdir)/Python/generated_cases.c.h.new + +Python/ceval.o: $(srcdir)/Python/opcode_targets.h $(srcdir)/Python/condvar.h $(srcdir)/Python/generated_cases.c.h + Python/frozen.o: $(FROZEN_FILES_OUT) @@ -1574,6 +1586,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/cpython/listobject.h \ $(srcdir)/Include/cpython/longintrepr.h \ $(srcdir)/Include/cpython/longobject.h \ + $(srcdir)/Include/cpython/memoryobject.h \ $(srcdir)/Include/cpython/methodobject.h \ $(srcdir)/Include/cpython/modsupport.h \ $(srcdir)/Include/cpython/object.h \ @@ -1611,14 +1624,17 @@ PYTHON_HEADERS= \ $(srcdir)/Include/internal/pycore_bytesobject.h \ $(srcdir)/Include/internal/pycore_call.h \ $(srcdir)/Include/internal/pycore_ceval.h \ + $(srcdir)/Include/internal/pycore_ceval_state.h \ $(srcdir)/Include/internal/pycore_code.h \ $(srcdir)/Include/internal/pycore_compile.h \ $(srcdir)/Include/internal/pycore_condvar.h \ $(srcdir)/Include/internal/pycore_context.h \ $(srcdir)/Include/internal/pycore_dict.h \ + $(srcdir)/Include/internal/pycore_dict_state.h \ $(srcdir)/Include/internal/pycore_descrobject.h \ $(srcdir)/Include/internal/pycore_dtoa.h \ $(srcdir)/Include/internal/pycore_exceptions.h \ + $(srcdir)/Include/internal/pycore_faulthandler.h \ $(srcdir)/Include/internal/pycore_fileutils.h \ $(srcdir)/Include/internal/pycore_floatobject.h \ $(srcdir)/Include/internal/pycore_format.h \ @@ -1627,6 +1643,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/internal/pycore_getopt.h \ $(srcdir)/Include/internal/pycore_gil.h \ $(srcdir)/Include/internal/pycore_global_objects.h \ + $(srcdir)/Include/internal/pycore_global_objects_fini_generated.h \ $(srcdir)/Include/internal/pycore_hamt.h \ 
$(srcdir)/Include/internal/pycore_hashtable.h \ $(srcdir)/Include/internal/pycore_import.h \ @@ -1638,13 +1655,17 @@ PYTHON_HEADERS= \ $(srcdir)/Include/internal/pycore_moduleobject.h \ $(srcdir)/Include/internal/pycore_namespace.h \ $(srcdir)/Include/internal/pycore_object.h \ + $(srcdir)/Include/internal/pycore_obmalloc.h \ + $(srcdir)/Include/internal/pycore_obmalloc_init.h \ $(srcdir)/Include/internal/pycore_pathconfig.h \ $(srcdir)/Include/internal/pycore_pyarena.h \ $(srcdir)/Include/internal/pycore_pyerrors.h \ $(srcdir)/Include/internal/pycore_pyhash.h \ $(srcdir)/Include/internal/pycore_pylifecycle.h \ $(srcdir)/Include/internal/pycore_pymem.h \ + $(srcdir)/Include/internal/pycore_pymem_init.h \ $(srcdir)/Include/internal/pycore_pystate.h \ + $(srcdir)/Include/internal/pycore_pythread.h \ $(srcdir)/Include/internal/pycore_range.h \ $(srcdir)/Include/internal/pycore_runtime.h \ $(srcdir)/Include/internal/pycore_runtime_init_generated.h \ @@ -1655,13 +1676,16 @@ PYTHON_HEADERS= \ $(srcdir)/Include/internal/pycore_structseq.h \ $(srcdir)/Include/internal/pycore_symtable.h \ $(srcdir)/Include/internal/pycore_sysmodule.h \ + $(srcdir)/Include/internal/pycore_time.h \ $(srcdir)/Include/internal/pycore_token.h \ $(srcdir)/Include/internal/pycore_traceback.h \ + $(srcdir)/Include/internal/pycore_tracemalloc.h \ $(srcdir)/Include/internal/pycore_tuple.h \ $(srcdir)/Include/internal/pycore_typeobject.h \ $(srcdir)/Include/internal/pycore_ucnhash.h \ $(srcdir)/Include/internal/pycore_unionobject.h \ $(srcdir)/Include/internal/pycore_unicodeobject.h \ + $(srcdir)/Include/internal/pycore_unicodeobject_generated.h \ $(srcdir)/Include/internal/pycore_warnings.h \ $(DTRACE_HEADERS) \ @PLATFORM_HEADERS@ \ @@ -1936,7 +1960,6 @@ LIBSUBDIRS= asyncio \ ctypes ctypes/macholib \ curses \ dbm \ - distutils distutils/command \ email email/mime \ encodings \ ensurepip ensurepip/_bundled \ @@ -1961,10 +1984,10 @@ LIBSUBDIRS= asyncio \ wsgiref \ $(XMLLIBSUBDIRS) \ xmlrpc \ + zipfile \ zoneinfo \ __phello__ -TESTSUBDIRS= distutils/tests \ - idlelib/idle_test \ +TESTSUBDIRS= idlelib/idle_test \ test test/audiodata \ test/capath test/cjkencodings \ test/data test/decimaltestdata \ @@ -2035,6 +2058,7 @@ TESTSUBDIRS= distutils/tests \ test/test_tools \ test/test_ttk \ test/test_warnings test/test_warnings/data \ + test/test_zipfile \ test/test_zoneinfo test/test_zoneinfo/data \ test/test_unittest test/test_unittest/testmock \ test/tracedmodules \ @@ -2042,6 +2066,8 @@ TESTSUBDIRS= distutils/tests \ test/xmltestdata test/xmltestdata/c14n-20 \ test/ziptestdata +COMPILEALL_OPTS=-j0 + TEST_MODULES=@TEST_MODULES@ libinstall: all $(srcdir)/Modules/xxmodule.c @for i in $(SCRIPTDIR) $(LIBDEST); \ @@ -2111,36 +2137,15 @@ libinstall: all $(srcdir)/Modules/xxmodule.c $(INSTALL_DATA) `cat pybuilddir.txt`/_sysconfigdata_$(ABIFLAGS)_$(MACHDEP)_$(MULTIARCH).py \ $(DESTDIR)$(LIBDEST); \ $(INSTALL_DATA) $(srcdir)/LICENSE $(DESTDIR)$(LIBDEST)/LICENSE.txt - if test -d $(DESTDIR)$(LIBDEST)/distutils/tests; then \ - $(INSTALL_DATA) $(srcdir)/Modules/xxmodule.c \ - $(DESTDIR)$(LIBDEST)/distutils/tests ; \ - fi - -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ - $(PYTHON_FOR_BUILD) -Wi $(DESTDIR)$(LIBDEST)/compileall.py \ - -j0 -d $(LIBDEST) -f \ - -x 'bad_coding|badsyntax|site-packages|test/test_lib2to3/data' \ - $(DESTDIR)$(LIBDEST) + @ # Build PYC files for the 3 optimization levels (0, 1, 2) -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ - $(PYTHON_FOR_BUILD) -Wi -O $(DESTDIR)$(LIBDEST)/compileall.py \ - -j0 -d $(LIBDEST) -f \ - 
-x 'bad_coding|badsyntax|site-packages|test/test_lib2to3/data' \ - $(DESTDIR)$(LIBDEST) - -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ - $(PYTHON_FOR_BUILD) -Wi -OO $(DESTDIR)$(LIBDEST)/compileall.py \ - -j0 -d $(LIBDEST) -f \ + $(PYTHON_FOR_BUILD) -Wi $(DESTDIR)$(LIBDEST)/compileall.py \ + -o 0 -o 1 -o 2 $(COMPILEALL_OPTS) -d $(LIBDEST) -f \ -x 'bad_coding|badsyntax|site-packages|test/test_lib2to3/data' \ $(DESTDIR)$(LIBDEST) -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ $(PYTHON_FOR_BUILD) -Wi $(DESTDIR)$(LIBDEST)/compileall.py \ - -j0 -d $(LIBDEST)/site-packages -f \ - -x badsyntax $(DESTDIR)$(LIBDEST)/site-packages - -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ - $(PYTHON_FOR_BUILD) -Wi -O $(DESTDIR)$(LIBDEST)/compileall.py \ - -j0 -d $(LIBDEST)/site-packages -f \ - -x badsyntax $(DESTDIR)$(LIBDEST)/site-packages - -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ - $(PYTHON_FOR_BUILD) -Wi -OO $(DESTDIR)$(LIBDEST)/compileall.py \ - -j0 -d $(LIBDEST)/site-packages -f \ + -o 0 -o 1 -o 2 $(COMPILEALL_OPTS) -d $(LIBDEST)/site-packages -f \ -x badsyntax $(DESTDIR)$(LIBDEST)/site-packages -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ $(PYTHON_FOR_BUILD) -m lib2to3.pgen2.driver $(DESTDIR)$(LIBDEST)/lib2to3/Grammar.txt @@ -2438,7 +2443,7 @@ rmtestturds: -rm -f gb-18030-2000.xml docclean: - $(MAKE) -C Doc clean + $(MAKE) -C $(srcdir)/Doc clean # like the 'clean' target but retain the profile guided optimization (PGO) # data. The PGO data is only valid if source code remains unchanged. @@ -2577,17 +2582,19 @@ Python/thread.o: @THREADHEADERS@ $(srcdir)/Python/condvar.h ########################################################################## # Module dependencies and platform-specific files -MODULE_DEPS=$(PYTHON_HEADERS) Modules/config.c $(EXPORTSYMS) +# force rebuild when header file or module build flavor (static/shared) is changed +MODULE_DEPS_STATIC=Modules/config.c +MODULE_DEPS_SHARED=$(MODULE_DEPS_STATIC) $(EXPORTSYMS) MODULE_CMATH_DEPS=$(srcdir)/Modules/_math.h MODULE_MATH_DEPS=$(srcdir)/Modules/_math.h -MODULE_PYEXPAT_DEPS=$(LIBEXPAT_HEADERS) @LIBEXPAT_INTERNAL@ +MODULE_PYEXPAT_DEPS=@LIBEXPAT_INTERNAL@ MODULE_UNICODEDATA_DEPS=$(srcdir)/Modules/unicodedata_db.h $(srcdir)/Modules/unicodename_db.h MODULE__BLAKE2_DEPS=$(srcdir)/Modules/_blake2/impl/blake2-config.h $(srcdir)/Modules/_blake2/impl/blake2-impl.h $(srcdir)/Modules/_blake2/impl/blake2.h $(srcdir)/Modules/_blake2/impl/blake2b-load-sse2.h $(srcdir)/Modules/_blake2/impl/blake2b-load-sse41.h $(srcdir)/Modules/_blake2/impl/blake2b-ref.c $(srcdir)/Modules/_blake2/impl/blake2b-round.h $(srcdir)/Modules/_blake2/impl/blake2b.c $(srcdir)/Modules/_blake2/impl/blake2s-load-sse2.h $(srcdir)/Modules/_blake2/impl/blake2s-load-sse41.h $(srcdir)/Modules/_blake2/impl/blake2s-load-xop.h $(srcdir)/Modules/_blake2/impl/blake2s-ref.c $(srcdir)/Modules/_blake2/impl/blake2s-round.h $(srcdir)/Modules/_blake2/impl/blake2s.c $(srcdir)/Modules/_blake2/blake2module.h $(srcdir)/Modules/hashlib.h MODULE__CTYPES_DEPS=$(srcdir)/Modules/_ctypes/ctypes.h $(srcdir)/Modules/_ctypes/darwin/dlfcn.h MODULE__CTYPES_MALLOC_CLOSURE=@MODULE__CTYPES_MALLOC_CLOSURE@ -MODULE__DECIMAL_DEPS=$(srcdir)/Modules/_decimal/docstrings.h $(LIBMPDEC_HEADERS) @LIBMPDEC_INTERNAL@ -MODULE__ELEMENTTREE_DEPS=$(srcdir)/Modules/pyexpat.c $(LIBEXPAT_HEADERS) @LIBEXPAT_INTERNAL@ +MODULE__DECIMAL_DEPS=$(srcdir)/Modules/_decimal/docstrings.h @LIBMPDEC_INTERNAL@ +MODULE__ELEMENTTREE_DEPS=$(srcdir)/Modules/pyexpat.c @LIBEXPAT_INTERNAL@ MODULE__HASHLIB_DEPS=$(srcdir)/Modules/hashlib.h 
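A few hunks up, the six separate ``compileall`` passes in ``libinstall`` (one per optimization level, for the standard library and again for ``site-packages``) collapse into two invocations that pass ``-o 0 -o 1 -o 2`` together with the new ``$(COMPILEALL_OPTS)`` (``-j0``). A rough module-level equivalent of one such pass, with a hypothetical destination directory standing in for ``$(DESTDIR)$(LIBDEST)``::

    import compileall

    compileall.compile_dir(
        "install/lib/python3.12",  # hypothetical destination directory
        optimize=[0, 1, 2],        # one directory walk emits .pyc for all three levels
        workers=0,                 # -j0: one worker per available CPU
        force=True,                # -f: rebuild even if cached files look current
        quiet=1,
    )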
MODULE__IO_DEPS=$(srcdir)/Modules/_io/_iomodule.h MODULE__MD5_DEPS=$(srcdir)/Modules/hashlib.h @@ -2597,7 +2604,7 @@ MODULE__SHA3_DEPS=$(srcdir)/Modules/_sha3/sha3.c $(srcdir)/Modules/_sha3/sha3.h MODULE__SHA512_DEPS=$(srcdir)/Modules/hashlib.h MODULE__SOCKET_DEPS=$(srcdir)/Modules/socketmodule.h $(srcdir)/Modules/addrinfo.h $(srcdir)/Modules/getaddrinfo.c $(srcdir)/Modules/getnameinfo.c MODULE__SSL_DEPS=$(srcdir)/Modules/_ssl.h $(srcdir)/Modules/_ssl/cert.c $(srcdir)/Modules/_ssl/debughelpers.c $(srcdir)/Modules/_ssl/misc.c $(srcdir)/Modules/_ssl_data.h $(srcdir)/Modules/_ssl_data_111.h $(srcdir)/Modules/_ssl_data_300.h $(srcdir)/Modules/socketmodule.h -MODULE__TESTCAPI_DEPS=$(srcdir)/Modules/testcapi_long.h $(srcdir)/Modules/_testcapi/parts.h +MODULE__TESTCAPI_DEPS=$(srcdir)/Modules/_testcapi/testcapi_long.h $(srcdir)/Modules/_testcapi/parts.h MODULE__SQLITE3_DEPS=$(srcdir)/Modules/_sqlite/connection.h $(srcdir)/Modules/_sqlite/cursor.h $(srcdir)/Modules/_sqlite/microprotocols.h $(srcdir)/Modules/_sqlite/module.h $(srcdir)/Modules/_sqlite/prepare_protocol.h $(srcdir)/Modules/_sqlite/row.h $(srcdir)/Modules/_sqlite/util.h # IF YOU PUT ANYTHING HERE IT WILL GO AWAY diff --git a/Misc/ACKS b/Misc/ACKS index 5d97067b85d3d4..d50cb3c2d1ee4f 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1320,6 +1320,7 @@ Michele Orrù Tomáš Orsava Oleg Oshmyan Denis Osipov +Itamar Ostricher Denis S. Otkidach Peter Otten Michael Otteneder @@ -1627,6 +1628,7 @@ Silas Sewell Ian Seyer Dmitry Shachnev Anish Shah +Jaineel Shah Daniel Shahaf Hui Shang Geoff Shannon diff --git a/Misc/NEWS.d/3.12.0a2.rst b/Misc/NEWS.d/3.12.0a2.rst new file mode 100644 index 00000000000000..4029856ee332a9 --- /dev/null +++ b/Misc/NEWS.d/3.12.0a2.rst @@ -0,0 +1,1100 @@ +.. date: 2022-11-04-09-29-36 +.. gh-issue: 98433 +.. nonce: l76c5G +.. release date: 2022-11-14 +.. section: Security + +The IDNA codec decoder used on DNS hostnames by :mod:`socket` or +:mod:`asyncio` related name resolution functions no longer involves a +quadratic algorithm. This prevents a potential CPU denial of service if an +out-of-spec excessive length hostname involving bidirectional characters +were decoded. Some protocols such as :mod:`urllib` http ``3xx`` redirects +potentially allow for an attacker to supply such a name. + +Individual labels within an IDNA encoded DNS name will now raise an error +early during IDNA decoding if they are longer than 1024 unicode characters +given that each decoded DNS label must be 63 or fewer characters and the +entire decoded DNS name is limited to 255. Only an application presenting a +hostname or label consisting primarily of :rfc:`3454` section 3.1 "Nothing" +characters to be removed would run into of this new limit. See also +:rfc:`5894` section 6 and :rfc:`3491`. + +.. + +.. date: 2022-10-26-21-04-23 +.. gh-issue: 98739 +.. nonce: keBWcY +.. section: Security + +Update bundled libexpat to 2.5.0 + +.. + +.. date: 2022-11-11-14-48-17 +.. gh-issue: 81057 +.. nonce: ik4iOv +.. section: Core and Builtins + +The docs clearly say that ``PyImport_Inittab``, +:c:func:`PyImport_AppendInittab`, and :c:func:`PyImport_ExtendInittab` +should not be used after :c:func:`Py_Initialize` has been called. We now +enforce this for the two functions. Additionally, the runtime now uses an +internal copy of ``PyImport_Inittab``, to guard against modification. + +.. + +.. date: 2022-11-09-12-07-24 +.. gh-issue: 99298 +.. nonce: NeArAJ +.. 
section: Core and Builtins + +Fix an issue that could potentially cause incorrect error handling for some +bytecode instructions. + +.. + +.. date: 2022-11-08-17-47-10 +.. gh-issue: 99254 +.. nonce: RSvyFt +.. section: Core and Builtins + +The compiler now removes all unused constants from code objects (except the +first one, which may be a docstring). + +.. + +.. date: 2022-11-08-16-35-25 +.. gh-issue: 99205 +.. nonce: 2YOoFT +.. section: Core and Builtins + +Fix an issue that prevented :c:type:`PyThreadState` and +:c:type:`PyInterpreterState` memory from being freed properly. + +.. + +.. date: 2022-11-07-14-16-59 +.. gh-issue: 81057 +.. nonce: 3uKlLQ +.. section: Core and Builtins + +The 18 global C variables holding the state of the allocators have been +moved to ``_PyRuntimeState``. This is a strictly internal change with no +change in behavior. + +.. + +.. date: 2022-11-07-10-29-41 +.. gh-issue: 99181 +.. nonce: bfG4bI +.. section: Core and Builtins + +Fix failure in :keyword:`except* <except_star>` with unhashable exceptions. + +.. + +.. date: 2022-11-07-08-17-12 +.. gh-issue: 99204 +.. nonce: Mf4hMD +.. section: Core and Builtins + +Fix calculation of :data:`sys._base_executable` when inside a POSIX virtual +environment using copies of the python binary when the base installation +does not provide the executable name used by the venv. Calculation will fall +back to alternative names ("python<MAJOR>", "python<MAJOR>.<MINOR>"). + +.. + +.. date: 2022-11-06-22-59-02 +.. gh-issue: 96055 +.. nonce: TmQuJn +.. section: Core and Builtins + +Update :mod:`faulthandler` to emit an error message with the proper +unexpected signal number. Patch by Dong-hee Na. + +.. + +.. date: 2022-11-06-13-25-01 +.. gh-issue: 99153 +.. nonce: uE3CVL +.. section: Core and Builtins + +Fix location of :exc:`SyntaxError` for a :keyword:`try` block with both +:keyword:`except` and :keyword:`except* <except_star>`. + +.. + +.. date: 2022-11-06-00-47-11 +.. gh-issue: 98686 +.. nonce: DBDy6U +.. section: Core and Builtins + +Merge the adaptive opcode logic into each instruction's unquickened variant, +and merge the logic in ``EXTENDED_ARG_QUICK`` into :opcode:`EXTENDED_ARG`. +With these changes, the quickening that happens at code object creation is +now only responsible for initializing warmup counters and inserting +superinstructions. + +.. + +.. date: 2022-11-06-00-17-58 +.. gh-issue: 99103 +.. nonce: bFA9BX +.. section: Core and Builtins + +Fix the error reporting positions of specialized traceback anchors when the +source line contains Unicode characters. + +.. + +.. date: 2022-11-05-18-36-27 +.. gh-issue: 99139 +.. nonce: cI9vV1 +.. section: Core and Builtins + +Improve the error suggestion for :exc:`NameError` exceptions for instances. +Now if a :exc:`NameError` is raised in a method and the instance has an +attribute that's exactly equal to the name in the exception, the suggestion +will include ``self.<NAME>`` instead of the closest match in the method +scope. Patch by Pablo Galindo + +.. + +.. date: 2022-11-03-13-11-17 +.. gh-issue: 98401 +.. nonce: CBS4nv +.. section: Core and Builtins + +Octal escapes with value larger than ``0o377`` (ex: ``"\477"``), deprecated +in Python 3.11, now produce a :exc:`SyntaxWarning`, instead of +:exc:`DeprecationWarning`. In a future Python version they will be +eventually a :exc:`SyntaxError`. Patch by Victor Stinner. + +.. + +.. date: 2022-11-02-17-02-06 +.. gh-issue: 98401 +.. nonce: y-dbVW +.. 
section: Core and Builtins + +A backslash-character pair that is not a valid escape sequence now generates +a :exc:`SyntaxWarning`, instead of :exc:`DeprecationWarning`. For example, +``re.compile("\d+\.\d+")`` now emits a :exc:`SyntaxWarning` (``"\d"`` is an +invalid escape sequence), use raw strings for regular expression: +``re.compile(r"\d+\.\d+")``. In a future Python version, :exc:`SyntaxError` +will eventually be raised, instead of :exc:`SyntaxWarning`. Patch by Victor +Stinner. + +.. + +.. date: 2022-11-02-14-42-35 +.. gh-issue: 96793 +.. nonce: q0Oi74 +.. section: Core and Builtins + +Handle StopIteration and StopAsyncIteration raised in generator or +coroutines in the bytecode, rather than in wrapping C code. + +.. + +.. date: 2022-10-31-22-55-34 +.. gh-issue: 98931 +.. nonce: AoWZ-4 +.. section: Core and Builtins + +Improve the :exc:`SyntaxError` error message when the user types ``import x +from y`` instead of ``from y import x``. Patch by Pablo Galindo + +.. + +.. date: 2022-10-31-21-01-35 +.. gh-issue: 98852 +.. nonce: MYaRN6 +.. section: Core and Builtins + +Fix subscription of type aliases containing bare generic types or types like +:class:`~typing.TypeVar`: for example ``tuple[A, T][int]`` and +``tuple[TypeVar, T][int]``, where ``A`` is a generic type, and ``T`` is a +type variable. + +.. + +.. date: 2022-10-31-18-03-10 +.. gh-issue: 98925 +.. nonce: zpdjVd +.. section: Core and Builtins + +Lower the recursion depth for marshal on WASI to support (in-development) +wasmtime 2.0. + +.. + +.. date: 2022-10-28-14-52-55 +.. gh-issue: 98783 +.. nonce: iG0kMs +.. section: Core and Builtins + +Fix multiple crashes in debug mode when ``str`` subclasses are used instead +of ``str`` itself. + +.. + +.. date: 2022-10-28-13-59-51 +.. gh-issue: 98811 +.. nonce: XQypJa +.. section: Core and Builtins + +Use complete source locations to simplify detection of ``__future__`` +imports which are not at the beginning of the file. Also corrects the offset +in the exception raised in one case, which was off by one and impeded +highlighting. + +.. + +.. date: 2022-10-28-09-42-51 +.. gh-issue: 96793 +.. nonce: ucBfWO +.. section: Core and Builtins + +Add specialization of :opcode:`FOR_ITER` for generators. Saves multiple +layers of dispatch and checking to get from the :opcode:`FOR_ITER` +instruction in the caller to the :opcode:`RESUME` in the generator. + +.. + +.. date: 2022-10-27-16-42-16 +.. gh-issue: 98762 +.. nonce: Eb2kzg +.. section: Core and Builtins + +Fix source locations of :keyword:`match` sub-patterns. + +.. + +.. date: 2022-10-24-10-30-30 +.. gh-issue: 98586 +.. nonce: Tha5Iy +.. section: Core and Builtins + +Added the methods :c:func:`PyObject_Vectorcall` and +:c:func:`PyObject_VectorcallMethod` to the :ref:`Limited API <stable>` along +with the auxiliary macro constant :const:`PY_VECTORCALL_ARGUMENTS_OFFSET`. + +The availability of these functions enables more efficient :PEP:`590` vector +calls from binary extension modules that avoid argument boxing/unboxing +overheads. + +.. + +.. date: 2022-10-21-11-28-53 +.. gh-issue: 99257 +.. nonce: nmcuf- +.. section: Core and Builtins + +Fix an issue where member descriptors (such as those for +:attr:`~object.__slots__`) could behave incorrectly or crash instead of +raising a :exc:`TypeError` when accessed via an instance of an invalid type. + +.. + +.. date: 2022-10-19-23-54-43 +.. gh-issue: 93143 +.. nonce: 1wCYub +.. 
section: Core and Builtins + +Rather than changing :attr:`~types.CodeType.co_code`, the interpreter will +now display a :exc:`RuntimeWarning` and assign :const:`None` to any fast +locals that are left unbound after jumps or :keyword:`del` statements +executed while tracing. + +.. + +.. date: 2022-10-19-15-59-08 +.. gh-issue: 96421 +.. nonce: e22y3r +.. section: Core and Builtins + +When calling into Python code from C code, through +:c:func:`PyEval_EvalFrameEx` or a related C-API function, a shim frame in +inserted into the call stack. This occurs in the +``_PyEval_EvalFrameDefault()`` function. The extra frame should be invisible +to all Python and most C extensions, but out-of-process profilers and +debuggers need to be aware of it. These shim frames can be detected by +checking ``frame->owner == FRAME_OWNED_BY_CSTACK``. + +Extensions implementing their own interpreters using PEP 523 need to be +aware of this shim frame and the changes to the semantics of +:opcode:`RETURN_VALUE`, :opcode:`YIELD_VALUE`, and +:opcode:`RETURN_GENERATOR`, which now clear the frame. + +.. + +.. date: 2022-10-19-01-01-08 +.. gh-issue: 98415 +.. nonce: ZS2eWh +.. section: Core and Builtins + +Fix detection of MAC addresses for :mod:`uuid` on certain OSs. Patch by +Chaim Sanders + +.. + +.. date: 2022-10-16-13-26-46 +.. gh-issue: 98686 +.. nonce: D9Gu_Q +.. section: Core and Builtins + +Quicken all code objects, and specialize adaptive bytecode instructions more +aggressively. + +.. + +.. date: 2022-10-15-23-15-14 +.. gh-issue: 92119 +.. nonce: PMSwwG +.. section: Core and Builtins + +Print exception class name instead of its string representation when raising +errors from :mod:`ctypes` calls. + +.. + +.. date: 2022-10-15-22-25-20 +.. gh-issue: 91058 +.. nonce: Uo2kW- +.. section: Core and Builtins + +:exc:`ImportError` raised from failed ``from <module> import <name>`` now +include suggestions for the value of ``<name>`` based on the available names +in ``<module>``. Patch by Pablo Galindo + +.. + +.. date: 2022-09-13-14-07-06 +.. gh-issue: 96793 +.. nonce: 7DLRSm +.. section: Core and Builtins + +The :opcode:`FOR_ITER` now leaves the iterator on the stack on termination +of the loop. This is to assist specialization of loops for generators. + +.. + +.. date: 2022-09-09-16-32-58 +.. gh-issue: 90716 +.. nonce: z4yuYq +.. section: Core and Builtins + +Add _pylong.py module. It includes asymptotically faster algorithms that +can be used for operations on integers with many digits. It is used by +longobject.c to speed up some operations. + +.. + +.. date: 2022-07-30-14-10-27 +.. gh-issue: 95389 +.. nonce: nSGEkG +.. section: Core and Builtins + +Expose :data:`~socket.ETH_P_ALL` and some of the :ref:`ETHERTYPE_* constants +<socket-ethernet-types>` in :mod:`socket`. Patch by Noam Cohen. + +.. + +.. date: 2022-06-10-16-37-44 +.. gh-issue: 93696 +.. nonce: 65BI2R +.. section: Core and Builtins + +Allow :mod:`pdb` to locate source for frozen modules in the standard +library. + +.. + +.. date: 2022-11-12-15-45-51 +.. gh-issue: 99418 +.. nonce: FxfAXS +.. section: Library + +Fix bug in :func:`urllib.parse.urlparse` that causes URL schemes that begin +with a digit, a plus sign, or a minus sign to be parsed incorrectly. + +.. + +.. date: 2022-11-11-18-23-41 +.. gh-issue: 94597 +.. nonce: m6vMDK +.. section: Library + +Deprecate :class:`asyncio.AbstractChildWatcher` to be removed in Python +3.14. Patch by Kumar Aditya. + +.. + +.. date: 2022-11-10-11-51-39 +.. gh-issue: 99305 +.. nonce: 6LzQc3 +.. 
section: Library + +Improve performance of :func:`secrets.token_hex`. + +.. + +.. date: 2022-11-09-20-48-42 +.. gh-issue: 74044 +.. nonce: zBj26K +.. section: Library + +Fixed bug where :func:`inspect.signature` reported incorrect arguments for +decorated methods. + +.. + +.. date: 2022-11-09-12-16-35 +.. gh-issue: 99275 +.. nonce: klOqoL +.. section: Library + +Fix ``SystemError`` in :mod:`ctypes` when exception was not set during +``__initsubclass__``. + +.. + +.. date: 2022-11-09-08-40-52 +.. gh-issue: 99277 +.. nonce: J1P44O +.. section: Library + +Remove older version of ``_SSLProtocolTransport.get_write_buffer_limits`` in +:mod:`!asyncio.sslproto` + +.. + +.. date: 2022-11-08-11-15-37 +.. gh-issue: 99248 +.. nonce: 1vt8xI +.. section: Library + +fix negative numbers failing in verify() + +.. + +.. date: 2022-11-06-12-44-51 +.. gh-issue: 99155 +.. nonce: vLZOzi +.. section: Library + +Fix :class:`statistics.NormalDist` pickle with ``0`` and ``1`` protocols. + +.. + +.. date: 2022-11-05-23-16-15 +.. gh-issue: 93464 +.. nonce: ucd4vP +.. section: Library + +``enum.auto()`` is now correctly activated when combined with other +assignment values. E.g. ``ONE = auto(), 'some text'`` will now evaluate as +``(1, 'some text')``. + +.. + +.. date: 2022-11-05-17-16-40 +.. gh-issue: 99134 +.. nonce: Msgspf +.. section: Library + +Update the bundled copy of pip to version 22.3.1. + +.. + +.. date: 2022-11-03-15-28-07 +.. gh-issue: 92584 +.. nonce: m5ctkm +.. section: Library + +Remove the ``distutils`` package. It was deprecated in Python 3.10 by +:pep:`632` "Deprecate distutils module". For projects still using +``distutils`` and cannot be updated to something else, the ``setuptools`` +project can be installed: it still provides ``distutils``. Patch by Victor +Stinner. + +.. + +.. date: 2022-11-02-18-27-13 +.. gh-issue: 98999 +.. nonce: Ai2KDh +.. section: Library + +Now :mod:`_pyio` is consistent with :mod:`_io` in raising ``ValueError`` +when executing methods over closed buffers. + +.. + +.. date: 2022-11-02-05-54-02 +.. gh-issue: 83004 +.. nonce: 0v8iyw +.. section: Library + +Clean up refleak on failed module initialisation in :mod:`_zoneinfo` + +.. + +.. date: 2022-11-02-05-53-25 +.. gh-issue: 83004 +.. nonce: qc_KHr +.. section: Library + +Clean up refleaks on failed module initialisation in in :mod:`_pickle` + +.. + +.. date: 2022-11-02-05-52-36 +.. gh-issue: 83004 +.. nonce: LBl79O +.. section: Library + +Clean up refleak on failed module initialisation in :mod:`_io`. + +.. + +.. date: 2022-10-31-12-34-03 +.. gh-issue: 98897 +.. nonce: rgNn4x +.. section: Library + +Fix memory leak in :func:`math.dist` when both points don't have the same +dimension. Patch by Kumar Aditya. + +.. + +.. date: 2022-10-30-22-42-48 +.. gh-issue: 98878 +.. nonce: fgrykp +.. section: Library + +Use the frame bound builtins when offering a name suggestion in +:mod:`traceback` to prevent crashing when ``__builtins__`` is not a dict. + +.. + +.. date: 2022-10-30-15-26-33 +.. gh-issue: 98139 +.. nonce: qtm-9T +.. section: Library + +In :mod:`importlib._bootstrap`, enhance namespace package repr to `<module +'x' (namespace) from ['path']>`. + +.. + +.. date: 2022-10-29-09-42-20 +.. gh-issue: 90352 +.. nonce: t8QEPt +.. section: Library + +Fix ``_SelectorDatagramTransport`` to inherit from +:class:`~asyncio.DatagramTransport` in :mod:`asyncio`. Patch by Kumar +Aditya. + +.. + +.. date: 2022-10-29-03-40-18 +.. gh-issue: 98793 +.. nonce: WSPB4A +.. 
section: Library + +Fix argument typechecks in :func:`!_overlapped.WSAConnect` and +:func:`!_overlapped.Overlapped.WSASendTo` functions. + +.. + +.. date: 2022-10-28-23-44-17 +.. gh-issue: 98744 +.. nonce: sGHDWm +.. section: Library + +Prevent crashing in :mod:`traceback` when retrieving the byte-offset for +some source files that contain certain unicode characters. + +.. + +.. date: 2022-10-27-12-56-38 +.. gh-issue: 98740 +.. nonce: ZoqqGM +.. section: Library + +Fix internal error in the :mod:`re` module which in very rare circumstances +prevented compilation of a regular expression containing a :ref:`conditional +expression <re-conditional-expression>` without the "else" branch. + +.. + +.. date: 2022-10-26-07-51-55 +.. gh-issue: 98703 +.. nonce: 0hW773 +.. section: Library + +Fix :meth:`asyncio.StreamWriter.drain` to call ``protocol.connection_lost`` +callback only once on Windows. + +.. + +.. date: 2022-10-25-20-17-34 +.. gh-issue: 98624 +.. nonce: YQUPFy +.. section: Library + +Add a mutex to unittest.mock.NonCallableMock to protect concurrent access to +mock attributes. + +.. + +.. date: 2022-10-25-07-00-31 +.. gh-issue: 98658 +.. nonce: nGABW9 +.. section: Library + +The :class:`array.array` class now supports subscripting, making it a +:term:`generic type`. + +.. + +.. date: 2022-10-15-10-43-45 +.. gh-issue: 98284 +.. nonce: SaVHTd +.. section: Library + +Improved :class:`TypeError` message for undefined abstract methods of a +:class:`abc.ABC` instance. The names of the missing methods are surrounded +by single-quotes to highlight them. + +.. + +.. date: 2022-10-10-07-07-31 +.. gh-issue: 96151 +.. nonce: K9fwoq +.. section: Library + +Allow ``BUILTINS`` to be a valid field name for frozen dataclasses. + +.. + +.. date: 2022-10-08-19-39-27 +.. gh-issue: 98086 +.. nonce: y---WC +.. section: Library + +Make sure ``patch.dict()`` can be applied on async functions. + +.. + +.. date: 2022-09-05-17-08-56 +.. gh-issue: 72719 +.. nonce: jUpzF3 +.. section: Library + +Remove modules :mod:`asyncore` and :mod:`asynchat`, which were deprecated by +:pep:`594`. + +.. + +.. date: 2022-08-23-03-13-18 +.. gh-issue: 96192 +.. nonce: TJywOF +.. section: Library + +Fix handling of ``bytes`` :term:`path-like objects <path-like object>` in +:func:`os.ismount()`. + +.. + +.. date: 2022-06-23-15-36-49 +.. gh-issue: 94172 +.. nonce: DzQk0s +.. section: Library + +:mod:`ftplib`: Remove the ``FTP_TLS.ssl_version`` class attribute: use the +*context* parameter instead. Patch by Victor Stinner + +.. + +.. date: 2022-06-23-15-31-49 +.. gh-issue: 94172 +.. nonce: AXE2IZ +.. section: Library + +Remove the *keyfile*, *certfile* and *check_hostname* parameters, deprecated +since Python 3.6, in modules: :mod:`ftplib`, :mod:`http.client`, +:mod:`imaplib`, :mod:`poplib` and :mod:`smtplib`. Use the *context* +parameter (*ssl_context* in :mod:`imaplib`) instead. Patch by Victor +Stinner. + +.. + +.. date: 2022-06-14-22-46-05 +.. gh-issue: 83638 +.. nonce: 73xfGK +.. section: Library + +Add the :attr:`~sqlite3.Connection.autocommit` attribute to +:class:`sqlite3.Connection` and the *autocommit* parameter to +:func:`sqlite3.connect` to control :pep:`249`-compliant :ref:`transaction +handling <sqlite3-transaction-control-autocommit>`. Patch by Erlend E. +Aasland. + +.. + +.. date: 2022-05-08-08-47-32 +.. gh-issue: 92452 +.. nonce: 3pNHe6 +.. section: Library + +Fixed a race condition that could cause :func:`sysconfig.get_config_var` to +incorrectly return :const:`None` in multi-threaded programs. + +.. + +.. 
date: 2022-05-03-11-32-29 +.. gh-issue: 91803 +.. nonce: pI4Juv +.. section: Library + +Fix an error when using a method of objects mocked with +:func:`unittest.mock.create_autospec` after it was sealed with +:func:`unittest.mock.seal` function. + +.. + +.. bpo: 38523 +.. date: 2020-10-23-22-20-52 +.. nonce: CrkxLh +.. section: Library + +:func:`shutil.copytree` now applies the *ignore_dangling_symlinks* argument +recursively. + +.. + +.. bpo: 40358 +.. date: 2020-04-30-02-15-08 +.. nonce: A4ygqe +.. section: Library + +Add walk_up argument in :meth:`pathlib.PurePath.relative_to`. + +.. + +.. bpo: 36267 +.. date: 2019-09-03-15-45-19 +.. nonce: z42Ex7 +.. section: Library + +Fix IndexError in :class:`argparse.ArgumentParser` when a ``store_true`` +action is given an explicit argument. + +.. + +.. date: 2022-10-29-02-33-46 +.. gh-issue: 98832 +.. nonce: DudEIH +.. section: Documentation + +Changes wording of docstring for :func:`pathlib.Path.iterdir`. + +.. + +.. date: 2022-10-06-13-00-28 +.. gh-issue: 97966 +.. nonce: fz7kFg +.. section: Documentation + +Update uname docs to clarify the special nature of the platform attribute +and to indicate when it became late-bound. + +.. + +.. date: 2022-10-31-14-47-49 +.. gh-issue: 98903 +.. nonce: 7KinCV +.. section: Tests + +The Python test suite now fails wit exit code 4 if no tests ran. It should +help detecting typos in test names and test methods. + +.. + +.. date: 2022-10-26-15-19-20 +.. gh-issue: 98713 +.. nonce: Lnu32R +.. section: Tests + +Fix a bug in the :mod:`typing` tests where a test relying on +CPython-specific implementation details was not decorated with +``@cpython_only`` and was not skipped on other implementations. + +.. + +.. date: 2022-10-15-07-46-48 +.. gh-issue: 87390 +.. nonce: asR-Zo +.. section: Tests + +Add tests for star-unpacking with PEP 646, and some other miscellaneous PEP +646 tests. + +.. + +.. date: 2022-10-12-14-57-06 +.. gh-issue: 96853 +.. nonce: ANe-bw +.. section: Tests + +Added explicit coverage of ``Py_Initialize`` (and hence ``Py_InitializeEx``) +back to the embedding tests (all other embedding tests migrated to +``Py_InitializeFromConfig`` in Python 3.11) + +.. + +.. bpo: 34272 +.. date: 2018-07-29-15-59-51 +.. nonce: lVX2uR +.. section: Tests + +Some C API tests were moved into the new Lib/test/test_capi/ directory. + +.. + +.. date: 2022-11-04-02-58-10 +.. gh-issue: 99086 +.. nonce: DV_4Br +.. section: Build + +Fix ``-Wimplicit-int`` compiler warning in :program:`configure` check for +``PTHREAD_SCOPE_SYSTEM``. + +.. + +.. date: 2022-11-02-19-25-07 +.. gh-issue: 99016 +.. nonce: R05NkD +.. section: Build + +Fix build with ``PYTHON_FOR_REGEN=python3.8``. + +.. + +.. date: 2022-11-02-18-45-35 +.. gh-issue: 97731 +.. nonce: zKpTlj +.. section: Build + +Specify the full path to the source location for ``make docclean`` (needed +for cross-builds). + +.. + +.. date: 2022-11-02-10-56-40 +.. gh-issue: 98949 +.. nonce: 3SRD8C +.. section: Build + +Drop unused build dependency on ``readelf``. + +.. + +.. date: 2022-11-01-21-45-58 +.. gh-issue: 98989 +.. nonce: tMxbdB +.. section: Build + +Use ``python3.11``, if available, for regeneration and freezing. + +.. + +.. date: 2022-10-28-22-24-26 +.. gh-issue: 98831 +.. nonce: IXRCRX +.. section: Build + +Add new tooling, in ``Tools/cases_generator``, to generate the interpreter +switch statement from a list of opcode definitions. This only affects +adding, modifying or removing instruction definitions. 
The instruction +definitions now live in ``Python/bytecodes.c``, in the form of a `custom DSL +(under development) +<https://github.com/faster-cpython/ideas/blob/main/3.12/interpreter_definition.md>`__. +The tooling reads this file and writes ``Python/generated_cases.c.h``, which +is then included by ``Python/ceval.c`` to provide most of the cases of the +main interpreter switch. + +.. + +.. date: 2022-10-28-18-53-40 +.. gh-issue: 98817 +.. nonce: oPqrtt +.. section: Build + +Remove PCbuild/lib.pyproj: it's not used for anything, is only a minor +convenience for Visual Studio users (who probably mostly don't even know +about it), and it takes a lot of maintenance effort to keep updated. + +.. + +.. date: 2022-10-27-19-47-31 +.. gh-issue: 98776 +.. nonce: lt_UOG +.. section: Build + +Fix ``make regen-test-levenshtein`` for out-of-tree builds. + +.. + +.. date: 2022-10-26-12-37-52 +.. gh-issue: 98707 +.. nonce: eVXGEx +.. section: Build + +Don't use vendored ``libmpdec`` headers if :option:`--with-system-libmpdec` +is passed to :program:`configure`. Don't use vendored ``libexpat`` headers +if :option:`--with-system-expat` is passed to :program:`!configure`. + +.. + +.. date: 2022-11-01-11-07-33 +.. gh-issue: 98689 +.. nonce: 0f6e_N +.. section: Windows + +Update Windows builds to zlib v1.2.13. v1.2.12 has CVE-2022-37434, but the +vulnerable ``inflateGetHeader`` API is not used by Python. + +.. + +.. date: 2022-11-01-00-37-13 +.. gh-issue: 98790 +.. nonce: fpaPAx +.. section: Windows + +Assumes that a missing ``DLLs`` directory means that standard extension +modules are in the executable's directory. + +.. + +.. date: 2022-10-27-20-30-16 +.. gh-issue: 98745 +.. nonce: v06p4r +.. section: Windows + +Update :file:`py.exe` launcher to install 3.11 by default and 3.12 on +request. + +.. + +.. date: 2022-10-26-17-43-09 +.. gh-issue: 98692 +.. nonce: bOopfZ +.. section: Windows + +Fix the :ref:`launcher` ignoring unrecognized shebang lines instead of +treating them as local paths + +.. + +.. date: 2022-10-25-10-34-17 +.. gh-issue: 94328 +.. nonce: 19NhdU +.. section: Windows + +Update Windows installer to use SQLite 3.39.4. + +.. + +.. date: 2022-10-25-10-32-23 +.. gh-issue: 94328 +.. nonce: W3YNC_ +.. section: macOS + +Update macOS installer to SQLite 3.39.4. + +.. + +.. date: 2022-11-04-16-13-35 +.. gh-issue: 98724 +.. nonce: p0urWO +.. section: C API + +The :c:macro:`Py_CLEAR`, :c:macro:`Py_SETREF` and :c:macro:`Py_XSETREF` +macros now only evaluate their argument once. If the argument has side +effects, these side effects are no longer duplicated. Patch by Victor +Stinner. + +.. + +.. date: 2022-11-03-17-46-41 +.. gh-issue: 98978 +.. nonce: KJjBvv +.. section: C API + +Fix use-after-free in ``Py_SetPythonHome(NULL)``, +``Py_SetProgramName(NULL)`` and ``_Py_SetProgramFullPath(NULL)`` function +calls. Issue reported by Benedikt Reinartz. Patch by Victor Stinner. + +.. + +.. date: 2022-10-25-17-50-43 +.. gh-issue: 98410 +.. nonce: NSXYfm +.. section: C API + +Add ``getbufferproc`` and ``releasebufferproc`` to the stable API. + +.. + +.. date: 2022-10-24-12-09-17 +.. gh-issue: 98610 +.. nonce: PLX2Np +.. section: C API + +Some configurable capabilities of sub-interpreters have changed. They always +allow subprocesses (:mod:`subprocess`) now, whereas before subprocesses +could be optionally disaallowed for a sub-interpreter. Instead +:func:`os.exec` can now be disallowed. Disallowing daemon threads is now +supported. Disallowing all threads is still allowed, but is never done by +default. 
Note that the optional restrictions are only available through +``_Py_NewInterpreterFromConfig()``, which isn't a public API. They do not +affect the main interpreter, nor :c:func:`Py_NewInterpreter`. + +.. + +.. date: 2022-10-24-11-26-55 +.. gh-issue: 98608 +.. nonce: _Q2lNV +.. section: C API + +A ``_PyInterpreterConfig`` has been added and ``_Py_NewInterpreter()`` has +been renamed to ``_Py_NewInterpreterFromConfig()``. The +"isolated_subinterpreters" argument is now a granular config that captures +the previous behavior. Note that this is all "private" API. + +.. + +.. date: 2022-10-16-15-00-25 +.. gh-issue: 96853 +.. nonce: V0wiXP +.. section: C API + +``Py_InitializeEx`` now correctly calls ``PyConfig_Clear`` after +initializing the interpreter (the omission didn't cause a memory leak only +because none of the dynamically allocated config fields are populated by the +wrapper function) + +.. + +.. date: 2022-08-05-15-26-12 +.. gh-issue: 91248 +.. nonce: ujirJJ +.. section: C API + +Add :c:func:`PyFrame_GetVar` and :c:func:`PyFrame_GetVarString` functions to +get a frame variable by its name. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/3.12.0a3.rst b/Misc/NEWS.d/3.12.0a3.rst new file mode 100644 index 00000000000000..3d1e43350d136e --- /dev/null +++ b/Misc/NEWS.d/3.12.0a3.rst @@ -0,0 +1,836 @@ +.. date: 2022-12-05-01-39-10 +.. gh-issue: 100001 +.. nonce: uD05Fc +.. release date: 2022-12-06 +.. section: Security + +``python -m http.server`` no longer allows terminal control characters sent +within a garbage request to be printed to the stderr server log. + +This is done by changing the :mod:`http.server` +:class:`BaseHTTPRequestHandler` ``.log_message`` method to replace control +characters with a ``\xHH`` hex escape before printing. + +.. + +.. date: 2022-11-11-12-50-28 +.. gh-issue: 87604 +.. nonce: OtwH5L +.. section: Security + +Avoid publishing list of active per-interpreter audit hooks via the +:mod:`gc` module + +.. + +.. date: 2022-11-30-11-09-40 +.. gh-issue: 99891 +.. nonce: 9VomwB +.. section: Core and Builtins + +Fix a bug in the tokenizer that could cause infinite recursion when showing +syntax warnings that happen in the first line of the source. Patch by Pablo +Galindo + +.. + +.. date: 2022-11-27-13-50-13 +.. gh-issue: 91054 +.. nonce: oox_kW +.. section: Core and Builtins + +Add :c:func:`PyCode_AddWatcher` and :c:func:`PyCode_ClearWatcher` APIs to +register callbacks to receive notification on creation and destruction of +code objects. + +.. + +.. date: 2022-11-26-04-00-41 +.. gh-issue: 99729 +.. nonce: A3ovwQ +.. section: Core and Builtins + +Fix an issue that could cause frames to be visible to Python code as they +are being torn down, possibly leading to memory corruption or hard crashes +of the interpreter. + +.. + +.. date: 2022-11-23-18-16-18 +.. gh-issue: 99708 +.. nonce: 7MuaiR +.. section: Core and Builtins + +Fix bug where compiler crashes on an if expression with an empty body block. + +.. + +.. date: 2022-11-21-11-27-14 +.. gh-issue: 99578 +.. nonce: DcKoBJ +.. section: Core and Builtins + +Fix a reference bug in :func:`_imp.create_builtin()` after the creation of +the first sub-interpreter for modules ``builtins`` and ``sys``. Patch by +Victor Stinner. + +.. + +.. date: 2022-11-19-22-27-52 +.. gh-issue: 99581 +.. nonce: yKYPbf +.. section: Core and Builtins + +Fixed a bug that was causing a buffer overflow if the tokenizer copies a +line missing the newline caracter from a file that is as long as the +available tokenizer buffer. 
Patch by Pablo galindo + +.. + +.. date: 2022-11-18-11-24-25 +.. gh-issue: 99553 +.. nonce: F64h-n +.. section: Core and Builtins + +Fix bug where an :exc:`ExceptionGroup` subclass can wrap a +:exc:`BaseException`. + +.. + +.. date: 2022-11-16-21-35-30 +.. gh-issue: 99547 +.. nonce: p_c_bp +.. section: Core and Builtins + +Add a function to os.path to check if a path is a junction: isjunction. Add +similar functionality to pathlib.Path as is_junction. + +.. + +.. date: 2022-11-12-01-39-57 +.. gh-issue: 99370 +.. nonce: _cu32j +.. section: Core and Builtins + +Fix zip path for venv created from a non-installed python on POSIX +platforms. + +.. + +.. date: 2022-11-11-14-04-01 +.. gh-issue: 99377 +.. nonce: -CJvWn +.. section: Core and Builtins + +Add audit events for thread creation and clear operations. + +.. + +.. date: 2022-11-10-17-09-16 +.. gh-issue: 98686 +.. nonce: bmAKwr +.. section: Core and Builtins + +Remove the ``BINARY_OP_GENERIC`` and ``COMPARE_OP_GENERIC`` +"specializations". + +.. + +.. date: 2022-11-10-16-53-40 +.. gh-issue: 99298 +.. nonce: HqRJES +.. section: Core and Builtins + +Remove the remaining error paths for attribute specializations, and refuse +to specialize attribute accesses on types that haven't had +:c:func:`PyType_Ready` called on them yet. + +.. + +.. date: 2022-11-05-22-26-35 +.. gh-issue: 99127 +.. nonce: Btk7ih +.. section: Core and Builtins + +Allow some features of :mod:`syslog` to the main interpreter only. Patch by +Dong-hee Na. + +.. + +.. date: 2022-10-05-11-44-52 +.. gh-issue: 91053 +.. nonce: f5Bo3p +.. section: Core and Builtins + +Optimizing interpreters and JIT compilers may need to invalidate internal +metadata when functions are modified. This change adds the ability to +provide a callback that will be invoked each time a function is created, +modified, or destroyed. + +.. + +.. date: 2022-09-17-17-08-01 +.. gh-issue: 90994 +.. nonce: f0H2Yd +.. section: Core and Builtins + +Improve error messages when there's a syntax error with call arguments. The +following three cases are covered: - No value is assigned to a named +argument, eg ``foo(a=)``. - A value is assigned to a star argument, eg +``foo(*args=[0])``. - A value is assigned to a double-star keyword argument, +eg ``foo(**kwarg={'a': 0})``. + +.. + +.. bpo: 45026 +.. date: 2021-08-29-15-55-19 +.. nonce: z7nTA3 +.. section: Core and Builtins + +Optimize the :class:`range` object iterator. It is now smaller, faster +iteration of ranges containing large numbers. Smaller pickles, faster +unpickling. + +.. + +.. bpo: 31718 +.. date: 2020-02-23-23-48-15 +.. nonce: sXko5e +.. section: Core and Builtins + +Raise :exc:`ValueError` instead of :exc:`SystemError` when methods of +uninitialized :class:`io.IncrementalNewlineDecoder` objects are called. +Patch by Oren Milman. + +.. + +.. bpo: 38031 +.. date: 2019-09-04-19-09-49 +.. nonce: Yq4L72 +.. section: Core and Builtins + +Fix a possible assertion failure in :class:`io.FileIO` when the opener +returns an invalid file descriptor. + +.. + +.. date: 2022-12-05-13-40-15 +.. gh-issue: 100001 +.. nonce: 78ReYp +.. section: Library + +Also \ escape \s in the http.server BaseHTTPRequestHandler.log_message so +that it is technically possible to parse the line and reconstruct what the +original data was. Without this a \xHH is ambiguious as to if it is a hex +replacement we put in or the characters r"\x" came through in the original +request line. + +.. + +.. date: 2022-12-03-05-58-48 +.. gh-issue: 99957 +.. nonce: jLYYgN +.. 
section: Library + +Add ``frozen_default`` parameter to :func:`typing.dataclass_transform`. + +.. + +.. date: 2022-11-22-19-31-26 +.. gh-issue: 79033 +.. nonce: MW6kHq +.. section: Library + +Fix :func:`asyncio.Server.wait_closed` to actually do what the docs promise +-- wait for all existing connections to complete, after closing the server. + +.. + +.. date: 2022-11-21-17-56-18 +.. gh-issue: 51524 +.. nonce: nTykx8 +.. section: Library + +Fix bug when calling trace.CoverageResults with valid infile. + +.. + +.. date: 2022-11-21-13-49-03 +.. gh-issue: 99645 +.. nonce: 9w1QKq +.. section: Library + +Fix a bug in handling class cleanups in :class:`unittest.TestCase`. Now +``addClassCleanup()`` uses separate lists for different ``TestCase`` +subclasses, and ``doClassCleanups()`` only cleans up the particular class. + +.. + +.. date: 2022-11-21-10-45-54 +.. gh-issue: 99508 +.. nonce: QqVbby +.. section: Library + +Fix ``TypeError`` in ``Lib/importlib/_bootstrap_external.py`` while calling +``_imp.source_hash()``. + +.. + +.. date: 2022-11-17-10-56-47 +.. gh-issue: 66285 +.. nonce: KvjlaB +.. section: Library + +Fix :mod:`asyncio` to not share event loop and signal wakeupfd in forked +processes. Patch by Kumar Aditya. + +.. + +.. date: 2022-11-15-10-55-24 +.. gh-issue: 97001 +.. nonce: KeQuVF +.. section: Library + +Release the GIL when calling termios APIs to avoid blocking threads. + +.. + +.. date: 2022-11-15-04-08-25 +.. gh-issue: 92647 +.. nonce: cZcjnJ +.. section: Library + +Use final status of an enum to determine lookup or creation branch of +functional API. + +.. + +.. date: 2022-11-14-08-21-56 +.. gh-issue: 99388 +.. nonce: UWSlwp +.. section: Library + +Add *loop_factory* parameter to :func:`asyncio.run` to allow specifying a +custom event loop factory. Patch by Kumar Aditya. + +.. + +.. date: 2022-11-13-02-06-56 +.. gh-issue: 99341 +.. nonce: 8-OlwB +.. section: Library + +Fix :func:`ast.increment_lineno` to also cover :class:`ast.TypeIgnore` when +changing line numbers. + +.. + +.. date: 2022-11-12-12-15-30 +.. gh-issue: 99382 +.. nonce: dKg_rW +.. section: Library + +Check the number of arguments in substitution in user generics containing a +:class:`~typing.TypeVarTuple` and one or more :class:`~typing.TypeVar`. + +.. + +.. date: 2022-11-12-12-10-23 +.. gh-issue: 99379 +.. nonce: bcGhxF +.. section: Library + +Fix substitution of :class:`~typing.ParamSpec` followed by +:class:`~typing.TypeVarTuple` in generic aliases. + +.. + +.. date: 2022-11-12-12-08-34 +.. gh-issue: 99344 +.. nonce: 7M_u8G +.. section: Library + +Fix substitution of :class:`~typing.TypeVarTuple` and +:class:`~typing.ParamSpec` together in user generics. + +.. + +.. date: 2022-11-09-12-36-12 +.. gh-issue: 99284 +.. nonce: 9p4J2l +.. section: Library + +Remove ``_use_broken_old_ctypes_structure_semantics_`` old untested and +undocumented hack from :mod:`ctypes`. + +.. + +.. date: 2022-11-09-03-34-29 +.. gh-issue: 99201 +.. nonce: lDJ7xI +.. section: Library + +Fix :exc:`IndexError` when initializing the config variables on Windows if +``HAVE_DYNAMIC_LOADING`` is not set. + +.. + +.. date: 2022-11-08-15-54-43 +.. gh-issue: 99240 +.. nonce: MhYwcz +.. section: Library + +Fix double-free bug in Argument Clinic ``str_converter`` by extracting +memory clean up to a new ``post_parsing`` section. + +.. + +.. date: 2022-11-08-11-18-51 +.. gh-issue: 64490 +.. nonce: VcBgrN +.. section: Library + +Fix refcount error when arguments are packed to tuple in Argument Clinic. + +.. + +.. date: 2022-11-02-23-47-07 +.. 
gh-issue: 99029 +.. nonce: 7uCiIB +.. section: Library + +:meth:`pathlib.PurePath.relative_to` now treats naked Windows drive paths +as relative. This brings its behaviour in line with other parts of pathlib. + +.. + +.. date: 2022-10-24-11-01-05 +.. gh-issue: 98253 +.. nonce: HVd5v4 +.. section: Library + +The implementation of the typing module is now more resilient to reference +leaks in binary extension modules. + +Previously, a reference leak in a typed C API-based extension module could +leak internals of the typing module, which could in turn introduce leaks in +essentially any other package with typed function signatures. Although the +typing package is not the original source of the problem, such non-local +dependences exacerbate debugging of large-scale projects, and the +implementation was therefore changed to reduce harm by providing better +isolation. + +.. + +.. date: 2022-10-19-18-31-53 +.. gh-issue: 98458 +.. nonce: vwyq7O +.. section: Library + +Fix an infinite loop in unittest when a self-referencing chained exception is +raised. + +.. + +.. date: 2022-10-19-13-37-23 +.. gh-issue: 93453 +.. nonce: wTB_sH +.. section: Library + +:func:`asyncio.get_event_loop` and many other :mod:`asyncio` functions like +:func:`asyncio.ensure_future`, :func:`asyncio.shield` or +:func:`asyncio.gather`, and also the +:meth:`~asyncio.BaseDefaultEventLoopPolicy.get_event_loop` method of +:class:`asyncio.BaseDefaultEventLoopPolicy` now raise a :exc:`RuntimeError` +if called when there is no running event loop and the current event loop was +not set. Previously, they implicitly created and set a new current event +loop. :exc:`DeprecationWarning` is no longer emitted if there is no running +event loop but the current event loop was set. + +.. + +.. date: 2022-10-16-18-52-00 +.. gh-issue: 97966 +.. nonce: humlhz +.. section: Library + +On ``uname_result``, restored the expectation that ``_fields`` and ``_asdict`` +would include all six properties including ``processor``. + +.. + +.. date: 2022-10-13-22-13-54 +.. gh-issue: 98248 +.. nonce: lwyygy +.. section: Library + +Provide informative error messages in :func:`struct.pack` when its integral +arguments are not in range. + +.. + +.. date: 2022-10-08-19-20-33 +.. gh-issue: 98108 +.. nonce: WUObqM +.. section: Library + +``zipfile.Path`` is now pickleable if its initialization parameters were +pickleable (e.g. for file system paths). + +.. + +.. date: 2022-10-08-15-41-00 +.. gh-issue: 98098 +.. nonce: DugpWi +.. section: Library + +Created packages from the zipfile and test_zipfile modules, separating +``zipfile.Path`` functionality. + +.. + +.. date: 2022-10-02-12-38-22 +.. gh-issue: 82836 +.. nonce: OvYLmC +.. section: Library + +Fix :attr:`~ipaddress.IPv4Address.is_private` properties in the +:mod:`ipaddress` module. Previously, non-private networks (0.0.0.0/0) would +return ``True`` from these properties; now they correctly return ``False``. + +.. + +.. date: 2022-09-14-21-56-15 +.. gh-issue: 96828 +.. nonce: ZoOY5G +.. section: Library + +Add an :data:`~ssl.OP_ENABLE_KTLS` option for enabling the use of kernel +TLS (kTLS). Patch by Illia Volochii. + +.. + +.. date: 2022-08-06-12-18-07 +.. gh-issue: 88863 +.. nonce: NnqsuJ +.. section: Library + +To avoid apparent memory leaks when :func:`asyncio.open_connection` raises, +break reference cycles generated by local exception and future instances +(which hold the exception instance as a member variable). Patch by Dong Uk, Kang. + +.. + +.. date: 2022-04-23-03-46-37 +.. gh-issue: 91078 +.. nonce: 87-hkp +.. 
section: Library + +:meth:`TarFile.next` now returns ``None`` when called on an empty tarfile. + +.. + +.. bpo: 47220 +.. date: 2022-04-04-22-54-11 +.. nonce: L9jYu4 +.. section: Library + +Document the optional *callback* parameter of :class:`WeakMethod`. Patch by +Géry Ogam. + +.. + +.. bpo: 44817 +.. date: 2021-08-03-05-31-00 +.. nonce: wOW_Qn +.. section: Library + +Ignore WinError 53 (ERROR_BAD_NETPATH), 65 (ERROR_NETWORK_ACCESS_DENIED) and +161 (ERROR_BAD_PATHNAME) when using ntpath.realpath(). + +.. + +.. bpo: 41260 +.. date: 2020-08-02-23-46-22 +.. nonce: Q2BNzY +.. section: Library + +Rename the *fmt* parameter of the pure Python implementation of +:meth:`datetime.date.strftime` to *format*. + +.. + +.. bpo: 15999 +.. date: 2019-08-30-10-48-53 +.. nonce: QqsRRi +.. section: Library + +All built-in functions now accept arguments of any type instead of just +``bool`` and ``int`` for boolean parameters. + +.. + +.. date: 2022-12-02-17-08-08 +.. gh-issue: 99931 +.. nonce: wC46hE +.. section: Documentation + +Use `sphinxext-opengraph <https://sphinxext-opengraph.readthedocs.io/>`__ to +generate `OpenGraph metadata <https://ogp.me/>`__. + +.. + +.. date: 2022-11-26-21-43-05 +.. gh-issue: 89682 +.. nonce: DhKoTM +.. section: Documentation + +Reworded the docstring of the default ``__contains__`` to clarify that it +returns a :class:`bool`. + +.. + +.. date: 2022-11-26-15-51-23 +.. gh-issue: 88330 +.. nonce: B_wFq8 +.. section: Documentation + +Improved the description of what a resource is in the importlib.resources docs. + +.. + +.. date: 2022-11-16-12-52-23 +.. gh-issue: 92892 +.. nonce: TS-P0j +.. section: Documentation + +Document that calling variadic functions with ctypes requires special care +on macOS/arm64 (and possibly other platforms). + +.. + +.. bpo: 41825 +.. date: 2020-09-22-12-32-16 +.. nonce: npcaCb +.. section: Documentation + +Restructured the documentation for the :func:`os.wait* <os.wait>` family of +functions, and improved the docs for :func:`os.waitid` with more explanation +of the possible argument constants. + +.. + +.. date: 2022-12-05-16-12-56 +.. gh-issue: 99892 +.. nonce: sz_eW8 +.. section: Tests + +Skip test_normalization() of test_unicodedata if it fails to download the +NormalizationTest.txt file from pythontest.net. Patch by Victor Stinner. + +.. + +.. date: 2022-12-01-18-55-18 +.. gh-issue: 99934 +.. nonce: Ox3Fqf +.. section: Tests + +Correct test_marsh on (32-bit) x86: the deterministic sets test was failing. + +.. + +.. date: 2022-11-23-18-32-16 +.. gh-issue: 99741 +.. nonce: q4R7NH +.. section: Tests + +We've implemented multi-phase init (PEP 489/630/687) for the internal (for +testing) _xxsubinterpreters module. + +.. + +.. date: 2022-11-21-19-21-30 +.. gh-issue: 99659 +.. nonce: 4gP0nm +.. section: Tests + +Optional big memory tests in ``test_sqlite3`` now catch the correct +:exc:`sqlite3.DataError` exception type when overly large strings and/or +blobs are passed. + +.. + +.. date: 2022-11-19-13-34-28 +.. gh-issue: 99593 +.. nonce: 8ZfCkj +.. section: Tests + +Cover the Unicode C API with tests. + +.. + +.. date: 2022-08-22-15-49-14 +.. gh-issue: 96002 +.. nonce: 4UE9UE +.. section: Tests + +Add a functional test for Argument Clinic. + +.. + +.. date: 2022-11-24-02-58-10 +.. gh-issue: 99086 +.. nonce: DV_4Br +.. section: Build + +Fix ``-Wimplicit-int``, ``-Wstrict-prototypes``, and +``-Wimplicit-function-declaration`` compiler warnings in +:program:`configure` checks. + +.. + +.. date: 2022-11-15-08-40-22 +.. gh-issue: 99337 +.. nonce: 5LoQDE +.. 
section: Build + +Fix a compilation issue with GCC 12 on macOS. + +.. + +.. date: 2022-11-09-14-42-48 +.. gh-issue: 99289 +.. nonce: X7wFE1 +.. section: Build + +Add a ``COMPILEALL_OPTS`` variable in Makefile to override :mod:`compileall` +options (default: ``-j0``) in ``make install``. Also merged the +``compileall`` commands into a single command building .pyc files for the +all optimization levels (0, 1, 2) at once. Patch by Victor Stinner. + +.. + +.. date: 2022-11-03-08-10-49 +.. gh-issue: 98872 +.. nonce: gdsR8X +.. section: Build + +Fix a possible fd leak in ``Programs/_freeze_module.c`` introduced in Python +3.11. + +.. + +.. date: 2022-10-16-12-49-24 +.. gh-issue: 88226 +.. nonce: BsnQ4k +.. section: Build + +Always define ``TARGET_*`` labels in ``Python/ceval.c``, even if +``USE_COMPUTED_GOTOS`` is disabled. This allows breakpoints to be set at +those labels in (for instance) ``gdb``. + +.. + +.. date: 2022-11-23-17-17-16 +.. gh-issue: 99345 +.. nonce: jOa3-f +.. section: Windows + +Use faster initialization functions to detect install location for Windows +Store package + +.. + +.. date: 2022-11-21-19-50-18 +.. gh-issue: 98629 +.. nonce: tMmB_B +.. section: Windows + +Fix initialization of :data:`sys.version` and ``sys._git`` on Windows + +.. + +.. date: 2022-11-16-19-03-21 +.. gh-issue: 99442 +.. nonce: 6Dgk3Q +.. section: Windows + +Fix handling in :ref:`launcher` when ``argv[0]`` does not include a file +extension. + +.. + +.. bpo: 40882 +.. date: 2020-06-06-15-10-37 +.. nonce: UvNbdj +.. section: Windows + +Fix a memory leak in :class:`multiprocessing.shared_memory.SharedMemory` on +Windows. + +.. + +.. date: 2022-11-25-09-23-20 +.. gh-issue: 87235 +.. nonce: SifjCD +.. section: macOS + +On macOS ``python3 /dev/fd/9 9</path/to/script.py`` failed for any script +longer than a couple of bytes. + +.. + +.. date: 2022-11-01-10-32-23 +.. gh-issue: 98940 +.. nonce: W3YzC_ +.. section: macOS + +Fix ``Mac/Extras.install.py`` file filter bug. + +.. + +.. date: 2022-08-11-09-58-15 +.. gh-issue: 64490 +.. nonce: PjwhM4 +.. section: Tools/Demos + +Argument Clinic varargs bugfixes + +* Fix out-of-bounds error in :c:func:`!_PyArg_UnpackKeywordsWithVararg`. +* Fix incorrect check which allowed more than one varargs in clinic.py. +* Fix miscalculation of ``noptargs`` in generated code. +* Do not generate ``noptargs`` when there is a vararg argument and no optional argument. + +.. + +.. date: 2022-12-05-17-30-13 +.. gh-issue: 98680 +.. nonce: FiMCxZ +.. section: C API + +``PyBUF_*`` constants were marked as part of Limited API of Python 3.11+. +These were available in 3.11.0 with :c:macro:`Py_LIMITED_API` defined for +3.11, and are necessary to use the buffer API. + +.. + +.. date: 2022-11-20-09-52-50 +.. gh-issue: 99612 +.. nonce: eBHksg +.. section: C API + +Fix :c:func:`PyUnicode_DecodeUTF8Stateful` for ASCII-only data: +``*consumed`` was not set. + +.. + +.. date: 2022-11-02-16-51-24 +.. gh-issue: 47146 +.. nonce: dsYDtI +.. section: C API + +The ``structmember.h`` header is deprecated. Its non-deprecated contents are +now available just by including ``Python.h``, with a ``Py_`` prefix added if +it was missing. (Deprecated contents are :c:macro:`T_OBJECT`, +:c:macro:`T_NONE`, and no-op flags.) Patch by Petr Viktorin, based on +earlier work by Alexander Belopolsky and Matthias Braun. 
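The gh-99547 entry above adds junction detection to both ``os.path`` and ``pathlib``. A minimal usage sketch, assuming Python 3.12+ and a hypothetical Windows path; on non-Windows systems both calls simply return ``False``::

    import os.path
    from pathlib import Path

    # Hypothetical path that may or may not be an NTFS junction.
    target = r"C:\Users\Public\LinkToData"

    print(os.path.isjunction(target))   # True only if the entry is a junction
    print(Path(target).is_junction())   # pathlib equivalent of the same check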
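The gh-99388 entry adds a keyword-only *loop_factory* parameter to :func:`asyncio.run`. A short sketch of how a caller might use it, assuming Python 3.12+; any zero-argument callable that returns an event loop works, and ``asyncio.SelectorEventLoop`` is used here purely as an illustration::

    import asyncio

    async def main():
        await asyncio.sleep(0)
        return "done"

    # The factory is called once, with no arguments, to create the loop that
    # asyncio.run() manages, instead of the policy-created default loop.
    result = asyncio.run(main(), loop_factory=asyncio.SelectorEventLoop)
    print(result)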
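The gh-99957 entry adds a ``frozen_default`` keyword to :func:`typing.dataclass_transform`. A sketch of the intended use; the ``frozen_model`` decorator below is hypothetical, and the flag only informs static type checkers that classes produced through it are frozen unless they opt out::

    from typing import dataclass_transform

    @dataclass_transform(frozen_default=True)
    def frozen_model(cls):
        # Hypothetical helper: a real implementation would synthesize
        # __init__/__eq__ and block attribute assignment at runtime.
        # frozen_default=True merely tells type checkers that classes
        # decorated with frozen_model are frozen by default.
        return cls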
diff --git a/Misc/NEWS.d/next/Build/2022-10-27-19-47-31.gh-issue-98776.lt_UOG.rst b/Misc/NEWS.d/next/Build/2022-10-27-19-47-31.gh-issue-98776.lt_UOG.rst deleted file mode 100644 index 049f13463b8c4d..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-10-27-19-47-31.gh-issue-98776.lt_UOG.rst +++ /dev/null @@ -1 +0,0 @@ -Fix ``make regen-test-levenshtein`` for out-of-tree builds. diff --git a/Misc/NEWS.d/next/Build/2022-12-08-14-00-04.gh-issue-88267.MqtRbm.rst b/Misc/NEWS.d/next/Build/2022-12-08-14-00-04.gh-issue-88267.MqtRbm.rst new file mode 100644 index 00000000000000..29c3f5a1d76355 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2022-12-08-14-00-04.gh-issue-88267.MqtRbm.rst @@ -0,0 +1 @@ +Avoid exporting Python symbols in linked Windows applications when the core is built as static. diff --git a/Misc/NEWS.d/next/C API/2022-10-16-15-00-25.gh-issue-96853.V0wiXP.rst b/Misc/NEWS.d/next/C API/2022-10-16-15-00-25.gh-issue-96853.V0wiXP.rst deleted file mode 100644 index d7e3cc423ac895..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-10-16-15-00-25.gh-issue-96853.V0wiXP.rst +++ /dev/null @@ -1,4 +0,0 @@ -``Py_InitializeEx`` now correctly calls ``PyConfig_Clear`` after initializing -the interpreter (the omission didn't cause a memory leak only because none -of the dynamically allocated config fields are populated by the wrapper -function) diff --git a/Misc/NEWS.d/next/C API/2022-10-24-11-26-55.gh-issue-98608._Q2lNV.rst b/Misc/NEWS.d/next/C API/2022-10-24-11-26-55.gh-issue-98608._Q2lNV.rst deleted file mode 100644 index 5abbd96a4261f7..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-10-24-11-26-55.gh-issue-98608._Q2lNV.rst +++ /dev/null @@ -1,4 +0,0 @@ -A ``_PyInterpreterConfig`` has been added and ``_Py_NewInterpreter()`` has -been renamed to ``_Py_NewInterpreterFromConfig()``. The -"isolated_subinterpreters" argument is now a granular config that captures -the previous behavior. Note that this is all "private" API. diff --git a/Misc/NEWS.d/next/C API/2022-10-24-12-09-17.gh-issue-98610.PLX2Np.rst b/Misc/NEWS.d/next/C API/2022-10-24-12-09-17.gh-issue-98610.PLX2Np.rst deleted file mode 100644 index 05bcfae1b61b40..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-10-24-12-09-17.gh-issue-98610.PLX2Np.rst +++ /dev/null @@ -1,9 +0,0 @@ -Some configurable capabilities of sub-interpreters have changed. -They always allow subprocesses (:mod:`subprocess`) now, whereas before -subprocesses could be optionally disaallowed for a sub-interpreter. -Instead :func:`os.exec` can now be disallowed. -Disallowing daemon threads is now supported. Disallowing all threads -is still allowed, but is never done by default. -Note that the optional restrictions are only available through -``_Py_NewInterpreterFromConfig()``, which isn't a public API. -They do not affect the main interpreter, nor :c:func:`Py_NewInterpreter`. diff --git a/Misc/NEWS.d/next/C API/2022-10-25-17-50-43.gh-issue-98410.NSXYfm.rst b/Misc/NEWS.d/next/C API/2022-10-25-17-50-43.gh-issue-98410.NSXYfm.rst deleted file mode 100644 index d98bc4e8103168..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-10-25-17-50-43.gh-issue-98410.NSXYfm.rst +++ /dev/null @@ -1 +0,0 @@ -Add ``getbufferproc`` and ``releasebufferproc`` to the stable API. 
diff --git a/Misc/NEWS.d/next/C API/2022-11-04-16-13-35.gh-issue-98724.p0urWO.rst b/Misc/NEWS.d/next/C API/2022-11-04-16-13-35.gh-issue-98724.p0urWO.rst new file mode 100644 index 00000000000000..e6c26cde168dbf --- /dev/null +++ b/Misc/NEWS.d/next/C API/2022-11-04-16-13-35.gh-issue-98724.p0urWO.rst @@ -0,0 +1,3 @@ +The :c:macro:`Py_CLEAR`, :c:macro:`Py_SETREF` and :c:macro:`Py_XSETREF` macros +now only evaluate their arguments once. If an argument has side effects, these +side effects are no longer duplicated. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-10-16-37-44.gh-issue-93696.65BI2R.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-10-16-37-44.gh-issue-93696.65BI2R.rst deleted file mode 100644 index 8eadab0ad8fb78..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-10-16-37-44.gh-issue-93696.65BI2R.rst +++ /dev/null @@ -1 +0,0 @@ -Allow :mod:`pdb` to locate source for frozen modules in the standard library. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-09-16-32-58.gh-issue-90716.z4yuYq.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-09-16-32-58.gh-issue-90716.z4yuYq.rst deleted file mode 100644 index d04e2c03c74736..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-09-16-32-58.gh-issue-90716.z4yuYq.rst +++ /dev/null @@ -1,3 +0,0 @@ -Add _pylong.py module. It includes asymptotically faster algorithms that -can be used for operations on integers with many digits. It is used by -longobject.c to speed up some operations. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-13-14-07-06.gh-issue-96793.7DLRSm.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-13-14-07-06.gh-issue-96793.7DLRSm.rst deleted file mode 100644 index dc03fab223650b..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-13-14-07-06.gh-issue-96793.7DLRSm.rst +++ /dev/null @@ -1,2 +0,0 @@ -The :opcode:`FOR_ITER` now leaves the iterator on the stack on termination -of the loop. This is to assist specialization of loops for generators. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-10-15-22-25-20.gh-issue-91058.Uo2kW-.rst b/Misc/NEWS.d/next/Core and Builtins/2022-10-15-22-25-20.gh-issue-91058.Uo2kW-.rst deleted file mode 100644 index 042c7cebd894ac..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-10-15-22-25-20.gh-issue-91058.Uo2kW-.rst +++ /dev/null @@ -1,3 +0,0 @@ -:exc:`ImportError` raised from failed ``from <module> import <name>`` now -include suggestions for the value of ``<name>`` based on the available names -in ``<module>``. Patch by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-10-21-16-10-39.gh-issue-98522.s_SixG.rst b/Misc/NEWS.d/next/Core and Builtins/2022-10-21-16-10-39.gh-issue-98522.s_SixG.rst new file mode 100644 index 00000000000000..d923af198f8203 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2022-10-21-16-10-39.gh-issue-98522.s_SixG.rst @@ -0,0 +1,3 @@ +Add an internal version number to code objects, to give better versioning of +inner functions and comprehensions, and thus better specialization of those +functions. This change is invisible to both Python and C extensions. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-10-24-10-30-30.gh-issue-98586.Tha5Iy.rst b/Misc/NEWS.d/next/Core and Builtins/2022-10-24-10-30-30.gh-issue-98586.Tha5Iy.rst deleted file mode 100644 index 5d7b0c89249657..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-10-24-10-30-30.gh-issue-98586.Tha5Iy.rst +++ /dev/null @@ -1,7 +0,0 @@ -Added the methods :c:func:`PyObject_Vectorcall` and -:c:func:`PyObject_VectorcallMethod` to the :ref:`Limited API <stable>` along -with the auxiliary macro constant :c:macro:`PY_VECTORCALL_ARGUMENTS_OFFSET`. - -The availability of these functions enables more efficient :PEP:`590` vector -calls from binary extension modules that avoid argument boxing/unboxing -overheads. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-10-28-13-59-51.gh-issue-98811.XQypJa.rst b/Misc/NEWS.d/next/Core and Builtins/2022-10-28-13-59-51.gh-issue-98811.XQypJa.rst deleted file mode 100644 index ce90a516cf0df4..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-10-28-13-59-51.gh-issue-98811.XQypJa.rst +++ /dev/null @@ -1,4 +0,0 @@ -Use complete source locations to simplify detection of ``__future__`` -imports which are not at the beginning of the file. Also corrects the offset -in the exception raised in one case, which was off by one and impeded -highlighting. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-10-28-14-52-55.gh-issue-98783.iG0kMs.rst b/Misc/NEWS.d/next/Core and Builtins/2022-10-28-14-52-55.gh-issue-98783.iG0kMs.rst deleted file mode 100644 index da1e61ea850472..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-10-28-14-52-55.gh-issue-98783.iG0kMs.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix multiple crashes in debug mode when ``str`` subclasses -are used instead of ``str`` itself. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-10-31-21-01-35.gh-issue-98852.MYaRN6.rst b/Misc/NEWS.d/next/Core and Builtins/2022-10-31-21-01-35.gh-issue-98852.MYaRN6.rst deleted file mode 100644 index 0e15819a862bbf..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-10-31-21-01-35.gh-issue-98852.MYaRN6.rst +++ /dev/null @@ -1,4 +0,0 @@ -Fix subscription of type aliases containing bare generic types or types like -:class:`~typing.TypeVar`: for example ``tuple[A, T][int]`` and -``tuple[TypeVar, T][int]``, where ``A`` is a generic type, and ``T`` is a -type variable. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-11-19-01-11-06.gh-issue-99582.wvOBVy.rst b/Misc/NEWS.d/next/Core and Builtins/2022-11-19-01-11-06.gh-issue-99582.wvOBVy.rst new file mode 100644 index 00000000000000..320d47cb9cf6e2 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2022-11-19-01-11-06.gh-issue-99582.wvOBVy.rst @@ -0,0 +1 @@ +Freeze :mod:`zipimport` module into ``_bootstrap_python``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-12-06-22-24-01.gh-issue-100050.lcrPqQ.rst b/Misc/NEWS.d/next/Core and Builtins/2022-12-06-22-24-01.gh-issue-100050.lcrPqQ.rst new file mode 100644 index 00000000000000..8e7c72d804f82f --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2022-12-06-22-24-01.gh-issue-100050.lcrPqQ.rst @@ -0,0 +1,2 @@ +Honor existing errors obtained when searching for mismatching parentheses in +the tokenizer. 
Patch by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-12-08-12-26-34.gh-issue-100110.ertac-.rst b/Misc/NEWS.d/next/Core and Builtins/2022-12-08-12-26-34.gh-issue-100110.ertac-.rst new file mode 100644 index 00000000000000..e494f44fd42cb7 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2022-12-08-12-26-34.gh-issue-100110.ertac-.rst @@ -0,0 +1 @@ +Specialize ``FOR_ITER`` for tuples. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-12-09-14-27-36.gh-issue-100143.5g9rb4.rst b/Misc/NEWS.d/next/Core and Builtins/2022-12-09-14-27-36.gh-issue-100143.5g9rb4.rst new file mode 100644 index 00000000000000..20a25f8b03d1d2 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2022-12-09-14-27-36.gh-issue-100143.5g9rb4.rst @@ -0,0 +1,3 @@ +When built with ``--enable-pystats``, stats collection is now off by +default. To enable it early at startup, pass the ``-Xpystats`` flag. Stats +are now always dumped, even if switched off. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-12-12-11-27-54.gh-issue-99955.Ix5Rrg.rst b/Misc/NEWS.d/next/Core and Builtins/2022-12-12-11-27-54.gh-issue-99955.Ix5Rrg.rst new file mode 100644 index 00000000000000..e9867b39ad7d1c --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2022-12-12-11-27-54.gh-issue-99955.Ix5Rrg.rst @@ -0,0 +1 @@ +Internal compiler functions (in compile.c) now consistently return -1 on error and 0 on success. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-12-13-16-05-18.gh-issue-100222.OVVvYe.rst b/Misc/NEWS.d/next/Core and Builtins/2022-12-13-16-05-18.gh-issue-100222.OVVvYe.rst new file mode 100644 index 00000000000000..032b49420d8b27 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2022-12-13-16-05-18.gh-issue-100222.OVVvYe.rst @@ -0,0 +1,2 @@ +Redefine the ``_Py_CODEUNIT`` typedef as a union to describe its layout to +the C compiler, avoiding type punning and improving clarity. diff --git a/Misc/NEWS.d/next/Documentation/2022-10-06-13-00-28.gh-issue-97966.fz7kFg.rst b/Misc/NEWS.d/next/Documentation/2022-10-06-13-00-28.gh-issue-97966.fz7kFg.rst deleted file mode 100644 index 8240442a3bcb5e..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2022-10-06-13-00-28.gh-issue-97966.fz7kFg.rst +++ /dev/null @@ -1,2 +0,0 @@ -Update uname docs to clarify the special nature of the platform attribute -and to indicate when it became late-bound. diff --git a/Misc/NEWS.d/next/Library/2020-04-30-02-15-08.bpo-40358.A4ygqe.rst b/Misc/NEWS.d/next/Library/2020-04-30-02-15-08.bpo-40358.A4ygqe.rst deleted file mode 100644 index a2815f54b031a1..00000000000000 --- a/Misc/NEWS.d/next/Library/2020-04-30-02-15-08.bpo-40358.A4ygqe.rst +++ /dev/null @@ -1 +0,0 @@ -Add walk_up argument in :meth:`pathlib.PurePath.relative_to`. diff --git a/Misc/NEWS.d/next/Library/2022-05-06-01-53-34.gh-issue-92122.96Lf2p.rst b/Misc/NEWS.d/next/Library/2022-05-06-01-53-34.gh-issue-92122.96Lf2p.rst new file mode 100644 index 00000000000000..d585535ee38d20 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-05-06-01-53-34.gh-issue-92122.96Lf2p.rst @@ -0,0 +1 @@ +Fix reStructuredText syntax errors in docstrings in the :mod:`enum` module. 
diff --git a/Misc/NEWS.d/next/Library/2022-05-08-08-47-32.gh-issue-92452.3pNHe6.rst b/Misc/NEWS.d/next/Library/2022-05-08-08-47-32.gh-issue-92452.3pNHe6.rst deleted file mode 100644 index 25d6477aa9138a..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-08-08-47-32.gh-issue-92452.3pNHe6.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed a race condition that could cause :func:`sysconfig.get_config_var` to -incorrectly return :const:`None` in multi-threaded programs. diff --git a/Misc/NEWS.d/next/Library/2022-09-16-08-21-46.gh-issue-88500.jQ0pCc.rst b/Misc/NEWS.d/next/Library/2022-09-16-08-21-46.gh-issue-88500.jQ0pCc.rst new file mode 100644 index 00000000000000..ad01f5e16b16a9 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-09-16-08-21-46.gh-issue-88500.jQ0pCc.rst @@ -0,0 +1,2 @@ +Reduced the memory usage of :func:`urllib.parse.unquote` and +:func:`urllib.parse.unquote_to_bytes` on large values. diff --git a/Misc/NEWS.d/next/Library/2022-10-07-18-16-00.gh-issue-98030.2oQCZy.rst b/Misc/NEWS.d/next/Library/2022-10-07-18-16-00.gh-issue-98030.2oQCZy.rst new file mode 100644 index 00000000000000..7768ed0817e8fa --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-10-07-18-16-00.gh-issue-98030.2oQCZy.rst @@ -0,0 +1,7 @@ +Add missing TCP socket options from Linux: ``TCP_MD5SIG``, +``TCP_THIN_LINEAR_TIMEOUTS``, ``TCP_THIN_DUPACK``, ``TCP_REPAIR``, +``TCP_REPAIR_QUEUE``, ``TCP_QUEUE_SEQ``, ``TCP_REPAIR_OPTIONS``, +``TCP_TIMESTAMP``, ``TCP_CC_INFO``, ``TCP_SAVE_SYN``, ``TCP_SAVED_SYN``, +``TCP_REPAIR_WINDOW``, ``TCP_FASTOPEN_CONNECT``, ``TCP_ULP``, +``TCP_MD5SIG_EXT``, ``TCP_FASTOPEN_KEY``, ``TCP_FASTOPEN_NO_COOKIE``, +``TCP_ZEROCOPY_RECEIVE``, ``TCP_INQ``, ``TCP_TX_DELAY``. diff --git a/Misc/NEWS.d/next/Library/2022-10-10-07-07-31.gh-issue-96151.K9fwoq.rst b/Misc/NEWS.d/next/Library/2022-10-10-07-07-31.gh-issue-96151.K9fwoq.rst deleted file mode 100644 index 700c9748735f8b..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-10-10-07-07-31.gh-issue-96151.K9fwoq.rst +++ /dev/null @@ -1 +0,0 @@ -Allow ``BUILTINS`` to be a valid field name for frozen dataclasses. diff --git a/Misc/NEWS.d/next/Library/2022-10-25-07-00-31.gh-issue-98658.nGABW9.rst b/Misc/NEWS.d/next/Library/2022-10-25-07-00-31.gh-issue-98658.nGABW9.rst deleted file mode 100644 index 8909d494166268..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-10-25-07-00-31.gh-issue-98658.nGABW9.rst +++ /dev/null @@ -1,2 +0,0 @@ -The :class:`array.array` class now supports subscripting, making it a -:term:`generic type`. diff --git a/Misc/NEWS.d/next/Library/2022-10-25-20-17-34.gh-issue-98624.YQUPFy.rst b/Misc/NEWS.d/next/Library/2022-10-25-20-17-34.gh-issue-98624.YQUPFy.rst deleted file mode 100644 index fb3a2b837fc3f9..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-10-25-20-17-34.gh-issue-98624.YQUPFy.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add a mutex to unittest.mock.NonCallableMock to protect concurrent access -to mock attributes. diff --git a/Misc/NEWS.d/next/Library/2022-10-26-07-51-55.gh-issue-98703.0hW773.rst b/Misc/NEWS.d/next/Library/2022-10-26-07-51-55.gh-issue-98703.0hW773.rst deleted file mode 100644 index 3107519a871430..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-10-26-07-51-55.gh-issue-98703.0hW773.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix :meth:`asyncio.StreamWriter.drain` to call ``protocol.connection_lost`` -callback only once on Windows. 
diff --git a/Misc/NEWS.d/next/Library/2022-10-28-23-44-17.gh-issue-98744.sGHDWm.rst b/Misc/NEWS.d/next/Library/2022-10-28-23-44-17.gh-issue-98744.sGHDWm.rst deleted file mode 100644 index cf99ea51bf7c3c..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-10-28-23-44-17.gh-issue-98744.sGHDWm.rst +++ /dev/null @@ -1,2 +0,0 @@ -Prevent crashing in :mod:`traceback` when retrieving the byte-offset for -some source files that contain certain unicode characters. diff --git a/Misc/NEWS.d/next/Library/2022-10-29-03-40-18.gh-issue-98793.WSPB4A.rst b/Misc/NEWS.d/next/Library/2022-10-29-03-40-18.gh-issue-98793.WSPB4A.rst deleted file mode 100644 index 7b67af06cf3d17..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-10-29-03-40-18.gh-issue-98793.WSPB4A.rst +++ /dev/null @@ -1 +0,0 @@ -Fix argument typechecks in :func:`!_overlapped.WSAConnect` and :func:`!_overlapped.Overlapped.WSASendTo` functions. diff --git a/Misc/NEWS.d/next/Library/2022-10-29-09-42-20.gh-issue-90352.t8QEPt.rst b/Misc/NEWS.d/next/Library/2022-10-29-09-42-20.gh-issue-90352.t8QEPt.rst deleted file mode 100644 index 8e80eae512e6d5..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-10-29-09-42-20.gh-issue-90352.t8QEPt.rst +++ /dev/null @@ -1 +0,0 @@ -Fix ``_SelectorDatagramTransport`` to inherit from :class:`~asyncio.DatagramTransport` in :mod:`asyncio`. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2022-10-30-22-42-48.gh-issue-98878.fgrykp.rst b/Misc/NEWS.d/next/Library/2022-10-30-22-42-48.gh-issue-98878.fgrykp.rst deleted file mode 100644 index e83422a77a2273..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-10-30-22-42-48.gh-issue-98878.fgrykp.rst +++ /dev/null @@ -1,2 +0,0 @@ -Use the frame bound builtins when offering a name suggestion in -:mod:`traceback` to prevent crashing when ``__builtins__`` is not a dict. diff --git a/Misc/NEWS.d/next/Library/2022-10-31-12-34-03.gh-issue-98897.rgNn4x.rst b/Misc/NEWS.d/next/Library/2022-10-31-12-34-03.gh-issue-98897.rgNn4x.rst deleted file mode 100644 index f61af2543c7f37..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-10-31-12-34-03.gh-issue-98897.rgNn4x.rst +++ /dev/null @@ -1 +0,0 @@ -Fix memory leak in :func:`math.dist` when both points don't have the same dimension. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2022-11-21-16-24-01.gh-issue-83035.qZIujU.rst b/Misc/NEWS.d/next/Library/2022-11-21-16-24-01.gh-issue-83035.qZIujU.rst new file mode 100644 index 00000000000000..629d9aefb2d869 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-11-21-16-24-01.gh-issue-83035.qZIujU.rst @@ -0,0 +1 @@ +Fix :func:`inspect.getsource` handling of decorator calls with nested parentheses. diff --git a/Misc/NEWS.d/next/Library/2022-11-23-23-58-45.gh-issue-94943.Oog0Zo.rst b/Misc/NEWS.d/next/Library/2022-11-23-23-58-45.gh-issue-94943.Oog0Zo.rst new file mode 100644 index 00000000000000..ed4754e49bd2cf --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-11-23-23-58-45.gh-issue-94943.Oog0Zo.rst @@ -0,0 +1,5 @@ +Add :ref:`enum-dataclass-support` to the +:class:`~enum.Enum` :meth:`~enum.Enum.__repr__`. +When inheriting from a :class:`~dataclasses.dataclass`, +only show the field names in the value section of the member :func:`repr`, +and not the dataclass' class name. 
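The file added above (gh-94943) changes the repr of :class:`~enum.Enum` members whose mixin is a :class:`~dataclasses.dataclass`. A small sketch of the expected behaviour on Python 3.12+; the class and field names are illustrative::

    from dataclasses import dataclass
    from enum import Enum

    @dataclass
    class CreatureDataMixin:
        size: str
        legs: int

    class Creature(CreatureDataMixin, Enum):
        BEETLE = 'small', 6
        DOG = 'medium', 4

    print(repr(Creature.DOG))   # e.g. <Creature.DOG: size='medium', legs=4>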
diff --git a/Misc/NEWS.d/next/Library/2022-12-03-20-06-16.gh-issue-98778.t5U9uc.rst b/Misc/NEWS.d/next/Library/2022-12-03-20-06-16.gh-issue-98778.t5U9uc.rst new file mode 100644 index 00000000000000..b1c170dff3eabc --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-12-03-20-06-16.gh-issue-98778.t5U9uc.rst @@ -0,0 +1,2 @@ +Update :exc:`~urllib.error.HTTPError` to be initialized properly, even if +the ``fp`` is ``None``. Patch by Dong-hee Na. diff --git a/Misc/NEWS.d/next/Library/2022-12-08-06-18-06.gh-issue-100098.uBvPlp.rst b/Misc/NEWS.d/next/Library/2022-12-08-06-18-06.gh-issue-100098.uBvPlp.rst new file mode 100644 index 00000000000000..256f2bcd39f81d --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-12-08-06-18-06.gh-issue-100098.uBvPlp.rst @@ -0,0 +1 @@ +Fix ``tuple`` subclasses being cast to ``tuple`` when used as enum values. diff --git a/Misc/NEWS.d/next/Library/2022-12-09-10-35-36.bpo-44592.z-P3oe.rst b/Misc/NEWS.d/next/Library/2022-12-09-10-35-36.bpo-44592.z-P3oe.rst new file mode 100644 index 00000000000000..7f290605934d76 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-12-09-10-35-36.bpo-44592.z-P3oe.rst @@ -0,0 +1,2 @@ +Fixes inconsistent handling of case sensitivity of *extrasaction* arg in +:class:`csv.DictWriter`. diff --git a/Misc/NEWS.d/next/Security/2022-10-26-21-04-23.gh-issue-98739.keBWcY.rst b/Misc/NEWS.d/next/Security/2022-10-26-21-04-23.gh-issue-98739.keBWcY.rst deleted file mode 100644 index b63a54b3676c65..00000000000000 --- a/Misc/NEWS.d/next/Security/2022-10-26-21-04-23.gh-issue-98739.keBWcY.rst +++ /dev/null @@ -1 +0,0 @@ -Update bundled libexpat to 2.5.0 diff --git a/Misc/NEWS.d/next/Tests/2022-06-16-13-26-31.gh-issue-93018.wvNx76.rst b/Misc/NEWS.d/next/Tests/2022-06-16-13-26-31.gh-issue-93018.wvNx76.rst new file mode 100644 index 00000000000000..a8fb98048e4023 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2022-06-16-13-26-31.gh-issue-93018.wvNx76.rst @@ -0,0 +1 @@ +Make two tests forgiving towards host system libexpat with backported security fixes applied. diff --git a/Misc/NEWS.d/next/Tests/2022-10-12-14-57-06.gh-issue-96853.ANe-bw.rst b/Misc/NEWS.d/next/Tests/2022-10-12-14-57-06.gh-issue-96853.ANe-bw.rst deleted file mode 100644 index 89958c57721913..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-10-12-14-57-06.gh-issue-96853.ANe-bw.rst +++ /dev/null @@ -1,3 +0,0 @@ -Added explicit coverage of ``Py_Initialize`` (and hence ``Py_InitializeEx``) -back to the embedding tests (all other embedding tests migrated to -``Py_InitializeFromConfig`` in Python 3.11) diff --git a/Misc/NEWS.d/next/Tests/2022-10-15-07-46-48.gh-issue-87390.asR-Zo.rst b/Misc/NEWS.d/next/Tests/2022-10-15-07-46-48.gh-issue-87390.asR-Zo.rst deleted file mode 100644 index 181e12c7430b13..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-10-15-07-46-48.gh-issue-87390.asR-Zo.rst +++ /dev/null @@ -1 +0,0 @@ -Add tests for star-unpacking with PEP 646, and some other miscellaneous PEP 646 tests. diff --git a/Misc/NEWS.d/next/Tests/2022-10-26-15-19-20.gh-issue-98713.Lnu32R.rst b/Misc/NEWS.d/next/Tests/2022-10-26-15-19-20.gh-issue-98713.Lnu32R.rst deleted file mode 100644 index 57e58943df4907..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-10-26-15-19-20.gh-issue-98713.Lnu32R.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix a bug in the :mod:`typing` tests where a test relying on CPython-specific -implementation details was not decorated with ``@cpython_only`` and was not -skipped on other implementations. 
diff --git a/Misc/NEWS.d/next/Tests/2022-12-08-00-03-37.gh-issue-100086.1zYpto.rst b/Misc/NEWS.d/next/Tests/2022-12-08-00-03-37.gh-issue-100086.1zYpto.rst new file mode 100644 index 00000000000000..a5f1bb9f5a5e05 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2022-12-08-00-03-37.gh-issue-100086.1zYpto.rst @@ -0,0 +1,3 @@ +The Python test runner (libregrtest) now logs Python build information like +"debug" vs "release" build, or LTO and PGO optimizations. Patch by Victor +Stinner. diff --git a/Misc/NEWS.d/next/Windows/2021-05-02-15-29-33.bpo-43984.U92jiv.rst b/Misc/NEWS.d/next/Windows/2021-05-02-15-29-33.bpo-43984.U92jiv.rst new file mode 100644 index 00000000000000..a5975b2d00c7bf --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2021-05-02-15-29-33.bpo-43984.U92jiv.rst @@ -0,0 +1,3 @@ +:meth:`winreg.SetValueEx` now leaves the target value untouched in the case of conversion errors. +Previously, ``-1`` would be written in case of such errors. + diff --git a/Misc/NEWS.d/next/Windows/2022-10-25-10-34-17.gh-issue-94328.19NhdU.rst b/Misc/NEWS.d/next/Windows/2022-10-25-10-34-17.gh-issue-94328.19NhdU.rst deleted file mode 100644 index eb48ff9b6ec666..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-10-25-10-34-17.gh-issue-94328.19NhdU.rst +++ /dev/null @@ -1 +0,0 @@ -Update Windows installer to use SQLite 3.39.4. diff --git a/Misc/NEWS.d/next/Windows/2022-10-26-17-43-09.gh-issue-98692.bOopfZ.rst b/Misc/NEWS.d/next/Windows/2022-10-26-17-43-09.gh-issue-98692.bOopfZ.rst deleted file mode 100644 index 3a5efd9a1cfa34..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-10-26-17-43-09.gh-issue-98692.bOopfZ.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix the :ref:`launcher` ignoring unrecognized shebang lines instead of -treating them as local paths diff --git a/Misc/NEWS.d/next/Windows/2022-10-27-20-30-16.gh-issue-98745.v06p4r.rst b/Misc/NEWS.d/next/Windows/2022-10-27-20-30-16.gh-issue-98745.v06p4r.rst deleted file mode 100644 index caf73db280f362..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-10-27-20-30-16.gh-issue-98745.v06p4r.rst +++ /dev/null @@ -1 +0,0 @@ -Update :file:`py.exe` launcher to install 3.11 by default and 3.12 on request. diff --git a/Misc/NEWS.d/next/Windows/2022-12-06-11-16-46.gh-issue-99941.GmUQ6o.rst b/Misc/NEWS.d/next/Windows/2022-12-06-11-16-46.gh-issue-99941.GmUQ6o.rst new file mode 100644 index 00000000000000..a019d7287207d8 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2022-12-06-11-16-46.gh-issue-99941.GmUQ6o.rst @@ -0,0 +1,2 @@ +Ensure that :func:`asyncio.Protocol.data_received` receives an immutable +:class:`bytes` object (as documented), instead of :class:`bytearray`. diff --git a/Misc/NEWS.d/next/Windows/2022-12-09-22-47-42.gh-issue-79218.Yiot2e.rst b/Misc/NEWS.d/next/Windows/2022-12-09-22-47-42.gh-issue-79218.Yiot2e.rst new file mode 100644 index 00000000000000..e2e6ca3c7796e0 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2022-12-09-22-47-42.gh-issue-79218.Yiot2e.rst @@ -0,0 +1 @@ +Define ``MS_WIN64`` for Mingw-w64 64bit, fix cython compilation failure. diff --git a/Misc/NEWS.d/next/macOS/2022-10-25-10-32-23.gh-issue-94328.W3YNC_.rst b/Misc/NEWS.d/next/macOS/2022-10-25-10-32-23.gh-issue-94328.W3YNC_.rst deleted file mode 100644 index cbef54d17a82e9..00000000000000 --- a/Misc/NEWS.d/next/macOS/2022-10-25-10-32-23.gh-issue-94328.W3YNC_.rst +++ /dev/null @@ -1 +0,0 @@ -Update macOS installer to SQLite 3.39.4. 
diff --git a/Misc/stable_abi.toml b/Misc/stable_abi.toml index 0ba0f51b2de451..c716f403d638ac 100644 --- a/Misc/stable_abi.toml +++ b/Misc/stable_abi.toml @@ -94,7 +94,7 @@ added = '3.2' struct_abi_kind = 'full-abi' [struct.PyMemberDef] - added = '3.2' + added = '3.2' # Before 3.12, PyMemberDef required #include "structmember.h" struct_abi_kind = 'full-abi' [struct.PyGetSetDef] added = '3.2' @@ -1777,11 +1777,9 @@ added = '3.2' abi_only = true [function.PyMember_GetOne] - added = '3.2' - abi_only = true + added = '3.2' # Before 3.12, available in "structmember.h" [function.PyMember_SetOne] - added = '3.2' - abi_only = true + added = '3.2' # Before 3.12, available in "structmember.h" # TLS api is deprecated; superseded by TSS API @@ -2273,6 +2271,50 @@ [function.PyMemoryView_FromBuffer] added = '3.11' +# Constants for Py_buffer API added to this list in Python 3.11.1 (https://github.com/python/cpython/issues/98680) +# (they were available with 3.11.0) +[const.PyBUF_MAX_NDIM] + added = '3.11' +[const.PyBUF_SIMPLE] + added = '3.11' +[const.PyBUF_WRITABLE] + added = '3.11' +[const.PyBUF_FORMAT] + added = '3.11' +[const.PyBUF_ND] + added = '3.11' +[const.PyBUF_STRIDES] + added = '3.11' +[const.PyBUF_C_CONTIGUOUS] + added = '3.11' +[const.PyBUF_F_CONTIGUOUS] + added = '3.11' +[const.PyBUF_ANY_CONTIGUOUS] + added = '3.11' +[const.PyBUF_INDIRECT] + added = '3.11' +[const.PyBUF_CONTIG] + added = '3.11' +[const.PyBUF_CONTIG_RO] + added = '3.11' +[const.PyBUF_STRIDED] + added = '3.11' +[const.PyBUF_STRIDED_RO] + added = '3.11' +[const.PyBUF_RECORDS] + added = '3.11' +[const.PyBUF_RECORDS_RO] + added = '3.11' +[const.PyBUF_FULL] + added = '3.11' +[const.PyBUF_FULL_RO] + added = '3.11' +[const.PyBUF_READ] + added = '3.11' +[const.PyBUF_WRITE] + added = '3.11' + + # (Detailed comments aren't really needed for further entries: from here on # we can use version control logs.) 
@@ -2303,3 +2345,44 @@ added = '3.12' [typedef.releasebufferproc] added = '3.12' + +[const.Py_T_BYTE] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_SHORT] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_INT] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_LONG] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_LONGLONG] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_UBYTE] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_UINT] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_USHORT] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_ULONG] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_ULONGLONG] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_PYSSIZET] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_FLOAT] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_DOUBLE] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_BOOL] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_STRING] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_STRING_INPLACE] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_CHAR] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_OBJECT_EX] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_READONLY] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_AUDIT_READ] + added = '3.12' # Before 3.12, available in "structmember.h" diff --git a/Modules/Setup b/Modules/Setup index 8fe79512787cde..e3a82975b5ff5e 100644 --- a/Modules/Setup +++ b/Modules/Setup @@ -291,6 +291,7 @@ PYTHONPATH=$(COREPYTHONPATH) #_testcapi _testcapimodule.c #_testimportmultiple _testimportmultiple.c #_testmultiphase _testmultiphase.c +#_testsinglephase _testsinglephase.c # --- # Uncommenting the following line tells makesetup that all following modules diff --git a/Modules/Setup.stdlib.in b/Modules/Setup.stdlib.in index ac8959ebea5bf2..d64752e8ca9609 100644 --- a/Modules/Setup.stdlib.in +++ b/Modules/Setup.stdlib.in @@ -169,12 +169,14 @@ @MODULE__XXTESTFUZZ_TRUE@_xxtestfuzz _xxtestfuzz/_xxtestfuzz.c _xxtestfuzz/fuzzer.c @MODULE__TESTBUFFER_TRUE@_testbuffer _testbuffer.c @MODULE__TESTINTERNALCAPI_TRUE@_testinternalcapi _testinternalcapi.c -@MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/vectorcall_limited.c _testcapi/heaptype.c _testcapi/unicode.c +@MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/vectorcall_limited.c _testcapi/heaptype.c _testcapi/unicode.c _testcapi/getargs.c _testcapi/pytime.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/structmember.c +@MODULE__TESTCLINIC_TRUE@_testclinic _testclinic.c # Some testing modules MUST be built as shared libraries. 
*shared* @MODULE__TESTIMPORTMULTIPLE_TRUE@_testimportmultiple _testimportmultiple.c @MODULE__TESTMULTIPHASE_TRUE@_testmultiphase _testmultiphase.c +@MODULE__TESTMULTIPHASE_TRUE@_testsinglephase _testsinglephase.c @MODULE__CTYPES_TEST_TRUE@_ctypes_test _ctypes/_ctypes_test.c # Limited API template modules; must be built as shared modules. diff --git a/Modules/_abc.c b/Modules/_abc.c index b22daa81e3ae19..e146d4fd0cac39 100644 --- a/Modules/_abc.c +++ b/Modules/_abc.c @@ -524,8 +524,7 @@ _abc__abc_register_impl(PyObject *module, PyObject *self, PyObject *subclass) } int result = PyObject_IsSubclass(subclass, self); if (result > 0) { - Py_INCREF(subclass); - return subclass; /* Already a subclass. */ + return Py_NewRef(subclass); /* Already a subclass. */ } if (result < 0) { return NULL; @@ -561,8 +560,7 @@ _abc__abc_register_impl(PyObject *module, PyObject *self, PyObject *subclass) set_collection_flag_recursive((PyTypeObject *)subclass, collection_flag); } } - Py_INCREF(subclass); - return subclass; + return Py_NewRef(subclass); } @@ -598,8 +596,7 @@ _abc__abc_instancecheck_impl(PyObject *module, PyObject *self, goto end; } if (incache > 0) { - result = Py_True; - Py_INCREF(result); + result = Py_NewRef(Py_True); goto end; } subtype = (PyObject *)Py_TYPE(instance); @@ -610,8 +607,7 @@ _abc__abc_instancecheck_impl(PyObject *module, PyObject *self, goto end; } if (incache > 0) { - result = Py_False; - Py_INCREF(result); + result = Py_NewRef(Py_False); goto end; } } @@ -628,8 +624,7 @@ _abc__abc_instancecheck_impl(PyObject *module, PyObject *self, switch (PyObject_IsTrue(result)) { case -1: - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); break; case 0: Py_DECREF(result); @@ -802,8 +797,7 @@ _abc__abc_subclasscheck_impl(PyObject *module, PyObject *self, end: Py_DECREF(impl); Py_XDECREF(subclasses); - Py_XINCREF(result); - return result; + return Py_XNewRef(result); } @@ -842,8 +836,7 @@ subclasscheck_check_registry(_abc_data *impl, PyObject *subclass, Py_ssize_t i = 0; while (_PySet_NextEntry(impl->_abc_registry, &pos, &key, &hash)) { - Py_INCREF(key); - copy[i++] = key; + copy[i++] = Py_NewRef(key); } assert(i == registry_size); diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index ab6219c322f9fd..60369d89dc39c9 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -1,11 +1,13 @@ #ifndef Py_BUILD_CORE_BUILTIN # define Py_BUILD_CORE_MODULE 1 #endif -#define NEEDS_PY_IDENTIFIER #include "Python.h" #include "pycore_pyerrors.h" // _PyErr_ClearExcState() #include "pycore_pystate.h" // _PyThreadState_GET() +#include "pycore_runtime_init.h" // _Py_ID() +#include "pycore_moduleobject.h" // _PyModule_GetState() +#include "structmember.h" // PyMemberDef #include <stddef.h> // offsetof() @@ -15,46 +17,81 @@ module _asyncio /*[clinic end generated code: output=da39a3ee5e6b4b0d input=8fd17862aa989c69]*/ -/* identifiers used from some functions */ -_Py_IDENTIFIER(__asyncio_running_event_loop__); -_Py_IDENTIFIER(_asyncio_future_blocking); -_Py_IDENTIFIER(add_done_callback); -_Py_IDENTIFIER(call_soon); -_Py_IDENTIFIER(cancel); -_Py_IDENTIFIER(get_event_loop); -_Py_IDENTIFIER(throw); +/* State of the _asyncio module */ +typedef struct { + PyTypeObject *FutureIterType; + PyTypeObject *TaskStepMethWrapper_Type; + PyTypeObject *FutureType; + PyTypeObject *TaskType; + PyTypeObject *PyRunningLoopHolder_Type; + PyObject *asyncio_mod; + PyObject *context_kwname; -/* State of the _asyncio module */ -static PyObject *asyncio_mod; -static PyObject 
*traceback_extract_stack; -static PyObject *asyncio_get_event_loop_policy; -static PyObject *asyncio_future_repr_func; -static PyObject *asyncio_iscoroutine_func; -static PyObject *asyncio_task_get_stack_func; -static PyObject *asyncio_task_print_stack_func; -static PyObject *asyncio_task_repr_func; -static PyObject *asyncio_InvalidStateError; -static PyObject *asyncio_CancelledError; -static PyObject *context_kwname; -static int module_initialized; + /* Dictionary containing tasks that are currently active in + all running event loops. {EventLoop: Task} */ + PyObject *current_tasks; + + /* WeakSet containing all alive tasks. */ + PyObject *all_tasks; + + /* An isinstance type cache for the 'is_coroutine()' function. */ + PyObject *iscoroutine_typecache; + + /* Imports from asyncio.events. */ + PyObject *asyncio_get_event_loop_policy; + + /* Imports from asyncio.base_futures. */ + PyObject *asyncio_future_repr_func; + + /* Imports from asyncio.exceptions. */ + PyObject *asyncio_CancelledError; + PyObject *asyncio_InvalidStateError; + + /* Imports from asyncio.base_tasks. */ + PyObject *asyncio_task_get_stack_func; + PyObject *asyncio_task_print_stack_func; + PyObject *asyncio_task_repr_func; + + /* Imports from asyncio.coroutines. */ + PyObject *asyncio_iscoroutine_func; -static PyObject *cached_running_holder; -static volatile uint64_t cached_running_holder_tsid; + /* Imports from traceback. */ + PyObject *traceback_extract_stack; -/* Counter for autogenerated Task names */ -static uint64_t task_name_counter = 0; + PyObject *cached_running_holder; // Borrowed ref. + volatile uint64_t cached_running_holder_tsid; -/* WeakSet containing all alive tasks. */ -static PyObject *all_tasks; + /* Counter for autogenerated Task names */ + uint64_t task_name_counter; +} asyncio_state; -/* Dictionary containing tasks that are currently active in - all running event loops. {EventLoop: Task} */ -static PyObject *current_tasks; +static inline asyncio_state * +get_asyncio_state(PyObject *mod) +{ + asyncio_state *state = _PyModule_GetState(mod); + assert(state != NULL); + return state; +} + +static inline asyncio_state * +get_asyncio_state_by_cls(PyTypeObject *cls) +{ + asyncio_state *state = (asyncio_state *)PyType_GetModuleState(cls); + assert(state != NULL); + return state; +} -/* An isinstance type cache for the 'is_coroutine()' function. 
*/ -static PyObject *iscoroutine_typecache; +static struct PyModuleDef _asynciomodule; +static inline asyncio_state * +get_asyncio_state_by_def(PyObject *self) +{ + PyTypeObject *tp = Py_TYPE(self); + PyObject *mod = PyType_GetModuleByDef(tp, &_asynciomodule); + assert(mod != NULL); + return get_asyncio_state(mod); +} typedef enum { STATE_PENDING, @@ -110,16 +147,11 @@ typedef struct { } PyRunningLoopHolder; -static PyTypeObject FutureType; -static PyTypeObject TaskType; -static PyTypeObject PyRunningLoopHolder_Type; - +#define Future_CheckExact(state, obj) Py_IS_TYPE(obj, state->FutureType) +#define Task_CheckExact(state, obj) Py_IS_TYPE(obj, state->TaskType) -#define Future_CheckExact(obj) Py_IS_TYPE(obj, &FutureType) -#define Task_CheckExact(obj) Py_IS_TYPE(obj, &TaskType) - -#define Future_Check(obj) PyObject_TypeCheck(obj, &FutureType) -#define Task_Check(obj) PyObject_TypeCheck(obj, &TaskType) +#define Future_Check(state, obj) PyObject_TypeCheck(obj, state->FutureType) +#define Task_Check(state, obj) PyObject_TypeCheck(obj, state->TaskType) #include "clinic/_asynciomodule.c.h" @@ -133,11 +165,11 @@ class _asyncio.Future "FutureObj *" "&Future_Type" /* Get FutureIter from Future */ static PyObject * future_new_iter(PyObject *); -static PyRunningLoopHolder * new_running_loop_holder(PyObject *); +static PyRunningLoopHolder * new_running_loop_holder(asyncio_state *, PyObject *); static int -_is_coroutine(PyObject *coro) +_is_coroutine(asyncio_state *state, PyObject *coro) { /* 'coro' is not a native coroutine, call asyncio.iscoroutine() to check if it's another coroutine flavour. @@ -145,7 +177,7 @@ _is_coroutine(PyObject *coro) Do this check after 'future_init()'; in case we need to raise an error, __del__ needs a properly initialized object. */ - PyObject *res = PyObject_CallOneArg(asyncio_iscoroutine_func, coro); + PyObject *res = PyObject_CallOneArg(state->asyncio_iscoroutine_func, coro); if (res == NULL) { return -1; } @@ -156,12 +188,12 @@ _is_coroutine(PyObject *coro) return is_res_true; } - if (PySet_GET_SIZE(iscoroutine_typecache) < 100) { + if (PySet_GET_SIZE(state->iscoroutine_typecache) < 100) { /* Just in case we don't want to cache more than 100 positive types. That shouldn't ever happen, unless someone stressing the system on purpose. */ - if (PySet_Add(iscoroutine_typecache, (PyObject*) Py_TYPE(coro))) { + if (PySet_Add(state->iscoroutine_typecache, (PyObject*) Py_TYPE(coro))) { return -1; } } @@ -171,7 +203,7 @@ _is_coroutine(PyObject *coro) static inline int -is_coroutine(PyObject *coro) +is_coroutine(asyncio_state *state, PyObject *coro) { if (PyCoro_CheckExact(coro)) { return 1; @@ -187,10 +219,10 @@ is_coroutine(PyObject *coro) a pure-Python function in 99.9% cases. 
*/ int has_it = PySet_Contains( - iscoroutine_typecache, (PyObject*) Py_TYPE(coro)); + state->iscoroutine_typecache, (PyObject*) Py_TYPE(coro)); if (has_it == 0) { /* type(coro) is not in iscoroutine_typecache */ - return _is_coroutine(coro); + return _is_coroutine(state, coro); } /* either an error has occurred or @@ -201,21 +233,18 @@ is_coroutine(PyObject *coro) static PyObject * -get_future_loop(PyObject *fut) +get_future_loop(asyncio_state *state, PyObject *fut) { /* Implementation of `asyncio.futures._get_loop` */ - _Py_IDENTIFIER(get_loop); - _Py_IDENTIFIER(_loop); PyObject *getloop; - if (Future_CheckExact(fut) || Task_CheckExact(fut)) { + if (Future_CheckExact(state, fut) || Task_CheckExact(state, fut)) { PyObject *loop = ((FutureObj *)fut)->fut_loop; - Py_INCREF(loop); - return loop; + return Py_NewRef(loop); } - if (_PyObject_LookupAttrId(fut, &PyId_get_loop, &getloop) < 0) { + if (_PyObject_LookupAttr(fut, &_Py_ID(get_loop), &getloop) < 0) { return NULL; } if (getloop != NULL) { @@ -224,20 +253,22 @@ get_future_loop(PyObject *fut) return res; } - return _PyObject_GetAttrId(fut, &PyId__loop); + return PyObject_GetAttr(fut, &_Py_ID(_loop)); } static int -get_running_loop(PyObject **loop) +get_running_loop(asyncio_state *state, PyObject **loop) { PyObject *rl; PyThreadState *ts = _PyThreadState_GET(); uint64_t ts_id = PyThreadState_GetID(ts); - if (ts_id == cached_running_holder_tsid && cached_running_holder != NULL) { + if (state->cached_running_holder_tsid == ts_id && + state->cached_running_holder != NULL) + { // Fast path, check the cache. - rl = cached_running_holder; // borrowed + rl = state->cached_running_holder; // borrowed } else { PyObject *ts_dict = _PyThreadState_GetDict(ts); // borrowed @@ -245,8 +276,8 @@ get_running_loop(PyObject **loop) goto not_found; } - rl = _PyDict_GetItemIdWithError( - ts_dict, &PyId___asyncio_running_event_loop__); // borrowed + rl = PyDict_GetItemWithError( + ts_dict, &_Py_ID(__asyncio_running_event_loop__)); // borrowed if (rl == NULL) { if (PyErr_Occurred()) { goto error; @@ -256,11 +287,11 @@ get_running_loop(PyObject **loop) } } - cached_running_holder = rl; // borrowed - cached_running_holder_tsid = ts_id; + state->cached_running_holder = rl; // borrowed + state->cached_running_holder_tsid = ts_id; } - assert(Py_IS_TYPE(rl, &PyRunningLoopHolder_Type)); + assert(Py_IS_TYPE(rl, state->PyRunningLoopHolder_Type)); PyObject *running_loop = ((PyRunningLoopHolder *)rl)->rl_loop; if (running_loop == Py_None) { @@ -276,8 +307,7 @@ get_running_loop(PyObject **loop) } #endif - Py_INCREF(running_loop); - *loop = running_loop; + *loop = Py_NewRef(running_loop); return 0; not_found: @@ -291,7 +321,7 @@ get_running_loop(PyObject **loop) static int -set_running_loop(PyObject *loop) +set_running_loop(asyncio_state *state, PyObject *loop) { PyObject *ts_dict = NULL; @@ -306,72 +336,66 @@ set_running_loop(PyObject *loop) return -1; } - PyRunningLoopHolder *rl = new_running_loop_holder(loop); + PyRunningLoopHolder *rl = new_running_loop_holder(state, loop); if (rl == NULL) { return -1; } - if (_PyDict_SetItemId( - ts_dict, &PyId___asyncio_running_event_loop__, (PyObject *)rl) < 0) + if (PyDict_SetItem( + ts_dict, &_Py_ID(__asyncio_running_event_loop__), (PyObject *)rl) < 0) { Py_DECREF(rl); // will cleanup loop & current_pid return -1; } Py_DECREF(rl); - cached_running_holder = (PyObject *)rl; - cached_running_holder_tsid = PyThreadState_GetID(tstate); + state->cached_running_holder = (PyObject *)rl; + state->cached_running_holder_tsid = 
PyThreadState_GetID(tstate); return 0; } static PyObject * -get_event_loop(int stacklevel) +get_event_loop(asyncio_state *state) { PyObject *loop; PyObject *policy; - if (get_running_loop(&loop)) { + if (get_running_loop(state, &loop)) { return NULL; } if (loop != NULL) { return loop; } - if (PyErr_WarnEx(PyExc_DeprecationWarning, - "There is no current event loop", - stacklevel)) - { - return NULL; - } - - policy = PyObject_CallNoArgs(asyncio_get_event_loop_policy); + policy = PyObject_CallNoArgs(state->asyncio_get_event_loop_policy); if (policy == NULL) { return NULL; } - loop = _PyObject_CallMethodIdNoArgs(policy, &PyId_get_event_loop); + loop = PyObject_CallMethodNoArgs(policy, &_Py_ID(get_event_loop)); Py_DECREF(policy); return loop; } static int -call_soon(PyObject *loop, PyObject *func, PyObject *arg, PyObject *ctx) +call_soon(asyncio_state *state, PyObject *loop, PyObject *func, PyObject *arg, + PyObject *ctx) { PyObject *handle; PyObject *stack[3]; Py_ssize_t nargs; if (ctx == NULL) { - handle = _PyObject_CallMethodIdObjArgs( - loop, &PyId_call_soon, func, arg, NULL); + handle = PyObject_CallMethodObjArgs( + loop, &_Py_ID(call_soon), func, arg, NULL); } else { /* Use FASTCALL to pass a keyword-only argument to call_soon */ - PyObject *callable = _PyObject_GetAttrId(loop, &PyId_call_soon); + PyObject *callable = PyObject_GetAttr(loop, &_Py_ID(call_soon)); if (callable == NULL) { return -1; } @@ -385,7 +409,8 @@ call_soon(PyObject *loop, PyObject *func, PyObject *arg, PyObject *ctx) } stack[nargs] = (PyObject *)ctx; EVAL_CALL_STAT_INC_IF_FUNCTION(EVAL_CALL_API, callable); - handle = PyObject_Vectorcall(callable, stack, nargs, context_kwname); + handle = PyObject_Vectorcall(callable, stack, nargs, + state->context_kwname); Py_DECREF(callable); } @@ -416,17 +441,18 @@ future_ensure_alive(FutureObj *fut) } -#define ENSURE_FUTURE_ALIVE(fut) \ - do { \ - assert(Future_Check(fut) || Task_Check(fut)); \ - if (future_ensure_alive((FutureObj*)fut)) { \ - return NULL; \ - } \ +#define ENSURE_FUTURE_ALIVE(state, fut) \ + do { \ + assert(Future_Check(state, fut) || Task_Check(state, fut)); \ + (void)state; \ + if (future_ensure_alive((FutureObj*)fut)) { \ + return NULL; \ + } \ } while(0); static int -future_schedule_callbacks(FutureObj *fut) +future_schedule_callbacks(asyncio_state *state, FutureObj *fut) { Py_ssize_t len; Py_ssize_t i; @@ -434,7 +460,7 @@ future_schedule_callbacks(FutureObj *fut) if (fut->fut_callback0 != NULL) { /* There's a 1st callback */ - int ret = call_soon( + int ret = call_soon(state, fut->fut_loop, fut->fut_callback0, (PyObject *)fut, fut->fut_context0); @@ -468,7 +494,7 @@ future_schedule_callbacks(FutureObj *fut) PyObject *cb = PyTuple_GET_ITEM(cb_tup, 0); PyObject *ctx = PyTuple_GET_ITEM(cb_tup, 1); - if (call_soon(fut->fut_loop, cb, (PyObject *)fut, ctx)) { + if (call_soon(state, fut->fut_loop, cb, (PyObject *)fut, ctx)) { /* If an error occurs in pure-Python implementation, all callbacks are cleared. 
*/ Py_CLEAR(fut->fut_callbacks); @@ -486,7 +512,6 @@ future_init(FutureObj *fut, PyObject *loop) { PyObject *res; int is_true; - _Py_IDENTIFIER(get_debug); // Same to FutureObj_clear() but not clearing fut->dict Py_CLEAR(fut->fut_loop); @@ -505,7 +530,8 @@ future_init(FutureObj *fut, PyObject *loop) fut->fut_blocking = 0; if (loop == Py_None) { - loop = get_event_loop(1); + asyncio_state *state = get_asyncio_state_by_def((PyObject *)fut); + loop = get_event_loop(state); if (loop == NULL) { return -1; } @@ -515,7 +541,7 @@ future_init(FutureObj *fut, PyObject *loop) } fut->fut_loop = loop; - res = _PyObject_CallMethodIdNoArgs(fut->fut_loop, &PyId_get_debug); + res = PyObject_CallMethodNoArgs(fut->fut_loop, &_Py_ID(get_debug)); if (res == NULL) { return -1; } @@ -531,7 +557,8 @@ future_init(FutureObj *fut, PyObject *loop) method, which is called during the interpreter shutdown and the traceback module is already unloaded. */ - fut->fut_source_tb = PyObject_CallNoArgs(traceback_extract_stack); + asyncio_state *state = get_asyncio_state_by_def((PyObject *)fut); + fut->fut_source_tb = PyObject_CallNoArgs(state->traceback_extract_stack); if (fut->fut_source_tb == NULL) { return -1; } @@ -541,35 +568,34 @@ future_init(FutureObj *fut, PyObject *loop) } static PyObject * -future_set_result(FutureObj *fut, PyObject *res) +future_set_result(asyncio_state *state, FutureObj *fut, PyObject *res) { if (future_ensure_alive(fut)) { return NULL; } if (fut->fut_state != STATE_PENDING) { - PyErr_SetString(asyncio_InvalidStateError, "invalid state"); + PyErr_SetString(state->asyncio_InvalidStateError, "invalid state"); return NULL; } assert(!fut->fut_result); - Py_INCREF(res); - fut->fut_result = res; + fut->fut_result = Py_NewRef(res); fut->fut_state = STATE_FINISHED; - if (future_schedule_callbacks(fut) == -1) { + if (future_schedule_callbacks(state, fut) == -1) { return NULL; } Py_RETURN_NONE; } static PyObject * -future_set_exception(FutureObj *fut, PyObject *exc) +future_set_exception(asyncio_state *state, FutureObj *fut, PyObject *exc) { PyObject *exc_val = NULL; if (fut->fut_state != STATE_PENDING) { - PyErr_SetString(asyncio_InvalidStateError, "invalid state"); + PyErr_SetString(state->asyncio_InvalidStateError, "invalid state"); return NULL; } @@ -580,13 +606,12 @@ future_set_exception(FutureObj *fut, PyObject *exc) } if (fut->fut_state != STATE_PENDING) { Py_DECREF(exc_val); - PyErr_SetString(asyncio_InvalidStateError, "invalid state"); + PyErr_SetString(state->asyncio_InvalidStateError, "invalid state"); return NULL; } } else { - exc_val = exc; - Py_INCREF(exc_val); + exc_val = Py_NewRef(exc); } if (!PyExceptionInstance_Check(exc_val)) { Py_DECREF(exc_val); @@ -607,7 +632,7 @@ future_set_exception(FutureObj *fut, PyObject *exc) fut->fut_exception_tb = PyException_GetTraceback(exc_val); fut->fut_state = STATE_FINISHED; - if (future_schedule_callbacks(fut) == -1) { + if (future_schedule_callbacks(state, fut) == -1) { return NULL; } @@ -616,7 +641,7 @@ future_set_exception(FutureObj *fut, PyObject *exc) } static PyObject * -create_cancelled_error(FutureObj *fut) +create_cancelled_error(asyncio_state *state, FutureObj *fut) { PyObject *exc; if (fut->fut_cancelled_exc != NULL) { @@ -627,34 +652,35 @@ create_cancelled_error(FutureObj *fut) } PyObject *msg = fut->fut_cancel_msg; if (msg == NULL || msg == Py_None) { - exc = PyObject_CallNoArgs(asyncio_CancelledError); + exc = PyObject_CallNoArgs(state->asyncio_CancelledError); } else { - exc = PyObject_CallOneArg(asyncio_CancelledError, msg); + exc = 
PyObject_CallOneArg(state->asyncio_CancelledError, msg); } return exc; } static void -future_set_cancelled_error(FutureObj *fut) +future_set_cancelled_error(asyncio_state *state, FutureObj *fut) { - PyObject *exc = create_cancelled_error(fut); + PyObject *exc = create_cancelled_error(state, fut); if (exc == NULL) { return; } - PyErr_SetObject(asyncio_CancelledError, exc); + PyErr_SetObject(state->asyncio_CancelledError, exc); Py_DECREF(exc); } static int -future_get_result(FutureObj *fut, PyObject **result) +future_get_result(asyncio_state *state, FutureObj *fut, PyObject **result) { if (fut->fut_state == STATE_CANCELLED) { - future_set_cancelled_error(fut); + future_set_cancelled_error(state, fut); return -1; } if (fut->fut_state != STATE_FINISHED) { - PyErr_SetString(asyncio_InvalidStateError, "Result is not set."); + PyErr_SetString(state->asyncio_InvalidStateError, + "Result is not set."); return -1; } @@ -667,19 +693,18 @@ future_get_result(FutureObj *fut, PyObject **result) if (PyException_SetTraceback(fut->fut_exception, tb) < 0) { return -1; } - Py_INCREF(fut->fut_exception); - *result = fut->fut_exception; + *result = Py_NewRef(fut->fut_exception); Py_CLEAR(fut->fut_exception_tb); return 1; } - Py_INCREF(fut->fut_result); - *result = fut->fut_result; + *result = Py_NewRef(fut->fut_result); return 0; } static PyObject * -future_add_done_callback(FutureObj *fut, PyObject *arg, PyObject *ctx) +future_add_done_callback(asyncio_state *state, FutureObj *fut, PyObject *arg, + PyObject *ctx) { if (!future_is_alive(fut)) { PyErr_SetString(PyExc_RuntimeError, "uninitialized Future object"); @@ -689,7 +714,7 @@ future_add_done_callback(FutureObj *fut, PyObject *arg, PyObject *ctx) if (fut->fut_state != STATE_PENDING) { /* The future is done/cancelled, so schedule the callback right away. 
*/ - if (call_soon(fut->fut_loop, arg, (PyObject*) fut, ctx)) { + if (call_soon(state, fut->fut_loop, arg, (PyObject*) fut, ctx)) { return NULL; } } @@ -716,10 +741,8 @@ future_add_done_callback(FutureObj *fut, PyObject *arg, PyObject *ctx) */ if (fut->fut_callbacks == NULL && fut->fut_callback0 == NULL) { - Py_INCREF(arg); - fut->fut_callback0 = arg; - Py_INCREF(ctx); - fut->fut_context0 = ctx; + fut->fut_callback0 = Py_NewRef(arg); + fut->fut_context0 = Py_NewRef(ctx); } else { PyObject *tup = PyTuple_New(2); @@ -755,7 +778,7 @@ future_add_done_callback(FutureObj *fut, PyObject *arg, PyObject *ctx) } static PyObject * -future_cancel(FutureObj *fut, PyObject *msg) +future_cancel(asyncio_state *state, FutureObj *fut, PyObject *msg) { fut->fut_log_tb = 0; @@ -767,7 +790,7 @@ future_cancel(FutureObj *fut, PyObject *msg) Py_XINCREF(msg); Py_XSETREF(fut->fut_cancel_msg, msg); - if (future_schedule_callbacks(fut) == -1) { + if (future_schedule_callbacks(state, fut) == -1) { return NULL; } @@ -822,6 +845,7 @@ FutureObj_clear(FutureObj *fut) static int FutureObj_traverse(FutureObj *fut, visitproc visit, void *arg) { + Py_VISIT(Py_TYPE(fut)); Py_VISIT(fut->fut_loop); Py_VISIT(fut->fut_callback0); Py_VISIT(fut->fut_context0); @@ -850,15 +874,16 @@ static PyObject * _asyncio_Future_result_impl(FutureObj *self) /*[clinic end generated code: output=f35f940936a4b1e5 input=49ecf9cf5ec50dc5]*/ { + asyncio_state *state = get_asyncio_state_by_def((PyObject *)self); PyObject *result; if (!future_is_alive(self)) { - PyErr_SetString(asyncio_InvalidStateError, + PyErr_SetString(state->asyncio_InvalidStateError, "Future object is not initialized."); return NULL; } - int res = future_get_result(self, &result); + int res = future_get_result(state, self, &result); if (res == -1) { return NULL; @@ -878,6 +903,9 @@ _asyncio_Future_result_impl(FutureObj *self) /*[clinic input] _asyncio.Future.exception + cls: defining_class + / + Return the exception that was set on this future. The exception (or None if no exception was set) is returned only if @@ -887,29 +915,32 @@ InvalidStateError. [clinic start generated code]*/ static PyObject * -_asyncio_Future_exception_impl(FutureObj *self) -/*[clinic end generated code: output=88b20d4f855e0710 input=733547a70c841c68]*/ +_asyncio_Future_exception_impl(FutureObj *self, PyTypeObject *cls) +/*[clinic end generated code: output=ce75576b187c905b input=3faf15c22acdb60d]*/ { if (!future_is_alive(self)) { - PyErr_SetString(asyncio_InvalidStateError, + asyncio_state *state = get_asyncio_state_by_cls(cls); + PyErr_SetString(state->asyncio_InvalidStateError, "Future object is not initialized."); return NULL; } if (self->fut_state == STATE_CANCELLED) { - future_set_cancelled_error(self); + asyncio_state *state = get_asyncio_state_by_cls(cls); + future_set_cancelled_error(state, self); return NULL; } if (self->fut_state != STATE_FINISHED) { - PyErr_SetString(asyncio_InvalidStateError, "Exception is not set."); + asyncio_state *state = get_asyncio_state_by_cls(cls); + PyErr_SetString(state->asyncio_InvalidStateError, + "Exception is not set."); return NULL; } if (self->fut_exception != NULL) { self->fut_log_tb = 0; - Py_INCREF(self->fut_exception); - return self->fut_exception; + return Py_NewRef(self->fut_exception); } Py_RETURN_NONE; @@ -918,6 +949,7 @@ _asyncio_Future_exception_impl(FutureObj *self) /*[clinic input] _asyncio.Future.set_result + cls: defining_class result: object / @@ -928,16 +960,19 @@ InvalidStateError. 
[clinic start generated code]*/ static PyObject * -_asyncio_Future_set_result(FutureObj *self, PyObject *result) -/*[clinic end generated code: output=1ec2e6bcccd6f2ce input=8b75172c2a7b05f1]*/ +_asyncio_Future_set_result_impl(FutureObj *self, PyTypeObject *cls, + PyObject *result) +/*[clinic end generated code: output=99afbbe78f99c32d input=d5a41c1e353acc2e]*/ { - ENSURE_FUTURE_ALIVE(self) - return future_set_result(self, result); + asyncio_state *state = get_asyncio_state_by_cls(cls); + ENSURE_FUTURE_ALIVE(state, self) + return future_set_result(state, self, result); } /*[clinic input] _asyncio.Future.set_exception + cls: defining_class exception: object / @@ -948,16 +983,19 @@ InvalidStateError. [clinic start generated code]*/ static PyObject * -_asyncio_Future_set_exception(FutureObj *self, PyObject *exception) -/*[clinic end generated code: output=f1c1b0cd321be360 input=e45b7d7aa71cc66d]*/ +_asyncio_Future_set_exception_impl(FutureObj *self, PyTypeObject *cls, + PyObject *exception) +/*[clinic end generated code: output=0a5e8b5a52f058d6 input=a245cd49d3df939b]*/ { - ENSURE_FUTURE_ALIVE(self) - return future_set_exception(self, exception); + asyncio_state *state = get_asyncio_state_by_cls(cls); + ENSURE_FUTURE_ALIVE(state, self) + return future_set_exception(state, self, exception); } /*[clinic input] _asyncio.Future.add_done_callback + cls: defining_class fn: object / * @@ -971,25 +1009,27 @@ scheduled with call_soon. [clinic start generated code]*/ static PyObject * -_asyncio_Future_add_done_callback_impl(FutureObj *self, PyObject *fn, - PyObject *context) -/*[clinic end generated code: output=7ce635bbc9554c1e input=15ab0693a96e9533]*/ +_asyncio_Future_add_done_callback_impl(FutureObj *self, PyTypeObject *cls, + PyObject *fn, PyObject *context) +/*[clinic end generated code: output=922e9a4cbd601167 input=599261c521458cc2]*/ { + asyncio_state *state = get_asyncio_state_by_cls(cls); if (context == NULL) { context = PyContext_CopyCurrent(); if (context == NULL) { return NULL; } - PyObject *res = future_add_done_callback(self, fn, context); + PyObject *res = future_add_done_callback(state, self, fn, context); Py_DECREF(context); return res; } - return future_add_done_callback(self, fn, context); + return future_add_done_callback(state, self, fn, context); } /*[clinic input] _asyncio.Future.remove_done_callback + cls: defining_class fn: object / @@ -999,14 +1039,16 @@ Returns the number of callbacks removed. [clinic start generated code]*/ static PyObject * -_asyncio_Future_remove_done_callback(FutureObj *self, PyObject *fn) -/*[clinic end generated code: output=5ab1fb52b24ef31f input=0a43280a149d505b]*/ +_asyncio_Future_remove_done_callback_impl(FutureObj *self, PyTypeObject *cls, + PyObject *fn) +/*[clinic end generated code: output=2da35ccabfe41b98 input=c7518709b86fc747]*/ { PyObject *newlist; Py_ssize_t len, i, j=0; Py_ssize_t cleared_callback0 = 0; - ENSURE_FUTURE_ALIVE(self) + asyncio_state *state = get_asyncio_state_by_cls(cls); + ENSURE_FUTURE_ALIVE(state, self) if (self->fut_callback0 != NULL) { int cmp = PyObject_RichCompareBool(self->fut_callback0, fn, Py_EQ); @@ -1103,6 +1145,8 @@ _asyncio_Future_remove_done_callback(FutureObj *self, PyObject *fn) /*[clinic input] _asyncio.Future.cancel + cls: defining_class + / msg: object = None Cancel the future and schedule callbacks. @@ -1113,11 +1157,13 @@ return True. 
[clinic start generated code]*/ static PyObject * -_asyncio_Future_cancel_impl(FutureObj *self, PyObject *msg) -/*[clinic end generated code: output=3edebbc668e5aba3 input=925eb545251f2c5a]*/ +_asyncio_Future_cancel_impl(FutureObj *self, PyTypeObject *cls, + PyObject *msg) +/*[clinic end generated code: output=074956f35904b034 input=bba8f8b786941a94]*/ { - ENSURE_FUTURE_ALIVE(self) - return future_cancel(self, msg); + asyncio_state *state = get_asyncio_state_by_cls(cls); + ENSURE_FUTURE_ALIVE(state, self) + return future_cancel(state, self, msg); } /*[clinic input] @@ -1162,16 +1208,19 @@ _asyncio_Future_done_impl(FutureObj *self) /*[clinic input] _asyncio.Future.get_loop + cls: defining_class + / + Return the event loop the Future is bound to. [clinic start generated code]*/ static PyObject * -_asyncio_Future_get_loop_impl(FutureObj *self) -/*[clinic end generated code: output=119b6ea0c9816c3f input=cba48c2136c79d1f]*/ +_asyncio_Future_get_loop_impl(FutureObj *self, PyTypeObject *cls) +/*[clinic end generated code: output=f50ea6c374d9ee97 input=163c2c498b45a1f0]*/ { - ENSURE_FUTURE_ALIVE(self) - Py_INCREF(self->fut_loop); - return self->fut_loop; + asyncio_state *state = get_asyncio_state_by_cls(cls); + ENSURE_FUTURE_ALIVE(state, self) + return Py_NewRef(self->fut_loop); } static PyObject * @@ -1207,7 +1256,8 @@ FutureObj_set_blocking(FutureObj *fut, PyObject *val, void *Py_UNUSED(ignored)) static PyObject * FutureObj_get_log_traceback(FutureObj *fut, void *Py_UNUSED(ignored)) { - ENSURE_FUTURE_ALIVE(fut) + asyncio_state *state = get_asyncio_state_by_def((PyObject *)fut); + ENSURE_FUTURE_ALIVE(state, fut) if (fut->fut_log_tb) { Py_RETURN_TRUE; } @@ -1242,24 +1292,23 @@ FutureObj_get_loop(FutureObj *fut, void *Py_UNUSED(ignored)) if (!future_is_alive(fut)) { Py_RETURN_NONE; } - Py_INCREF(fut->fut_loop); - return fut->fut_loop; + return Py_NewRef(fut->fut_loop); } static PyObject * FutureObj_get_callbacks(FutureObj *fut, void *Py_UNUSED(ignored)) { + asyncio_state *state = get_asyncio_state_by_def((PyObject *)fut); Py_ssize_t i; - ENSURE_FUTURE_ALIVE(fut) + ENSURE_FUTURE_ALIVE(state, fut) if (fut->fut_callback0 == NULL) { if (fut->fut_callbacks == NULL) { Py_RETURN_NONE; } - Py_INCREF(fut->fut_callbacks); - return fut->fut_callbacks; + return Py_NewRef(fut->fut_callbacks); } Py_ssize_t len = 1; @@ -1301,23 +1350,23 @@ FutureObj_get_callbacks(FutureObj *fut, void *Py_UNUSED(ignored)) static PyObject * FutureObj_get_result(FutureObj *fut, void *Py_UNUSED(ignored)) { - ENSURE_FUTURE_ALIVE(fut) + asyncio_state *state = get_asyncio_state_by_def((PyObject *)fut); + ENSURE_FUTURE_ALIVE(state, fut) if (fut->fut_result == NULL) { Py_RETURN_NONE; } - Py_INCREF(fut->fut_result); - return fut->fut_result; + return Py_NewRef(fut->fut_result); } static PyObject * FutureObj_get_exception(FutureObj *fut, void *Py_UNUSED(ignored)) { - ENSURE_FUTURE_ALIVE(fut) + asyncio_state *state = get_asyncio_state_by_def((PyObject *)fut); + ENSURE_FUTURE_ALIVE(state, fut) if (fut->fut_exception == NULL) { Py_RETURN_NONE; } - Py_INCREF(fut->fut_exception); - return fut->fut_exception; + return Py_NewRef(fut->fut_exception); } static PyObject * @@ -1326,8 +1375,7 @@ FutureObj_get_source_traceback(FutureObj *fut, void *Py_UNUSED(ignored)) if (!future_is_alive(fut) || fut->fut_source_tb == NULL) { Py_RETURN_NONE; } - Py_INCREF(fut->fut_source_tb); - return fut->fut_source_tb; + return Py_NewRef(fut->fut_source_tb); } static PyObject * @@ -1336,8 +1384,7 @@ FutureObj_get_cancel_message(FutureObj *fut, void 
*Py_UNUSED(ignored)) if (fut->fut_cancel_msg == NULL) { Py_RETURN_NONE; } - Py_INCREF(fut->fut_cancel_msg); - return fut->fut_cancel_msg; + return Py_NewRef(fut->fut_cancel_msg); } static int @@ -1356,35 +1403,33 @@ FutureObj_set_cancel_message(FutureObj *fut, PyObject *msg, static PyObject * FutureObj_get_state(FutureObj *fut, void *Py_UNUSED(ignored)) { - _Py_IDENTIFIER(PENDING); - _Py_IDENTIFIER(CANCELLED); - _Py_IDENTIFIER(FINISHED); + asyncio_state *state = get_asyncio_state_by_def((PyObject *)fut); PyObject *ret = NULL; - ENSURE_FUTURE_ALIVE(fut) + ENSURE_FUTURE_ALIVE(state, fut) switch (fut->fut_state) { case STATE_PENDING: - ret = _PyUnicode_FromId(&PyId_PENDING); + ret = &_Py_ID(PENDING); break; case STATE_CANCELLED: - ret = _PyUnicode_FromId(&PyId_CANCELLED); + ret = &_Py_ID(CANCELLED); break; case STATE_FINISHED: - ret = _PyUnicode_FromId(&PyId_FINISHED); + ret = &_Py_ID(FINISHED); break; default: assert (0); } - Py_XINCREF(ret); - return ret; + return Py_XNewRef(ret); } static PyObject * FutureObj_repr(FutureObj *fut) { - ENSURE_FUTURE_ALIVE(fut) - return PyObject_CallOneArg(asyncio_future_repr_func, (PyObject *)fut); + asyncio_state *state = get_asyncio_state_by_def((PyObject *)fut); + ENSURE_FUTURE_ALIVE(state, fut) + return PyObject_CallOneArg(state->asyncio_future_repr_func, (PyObject *)fut); } /*[clinic input] @@ -1400,18 +1445,13 @@ static PyObject * _asyncio_Future__make_cancelled_error_impl(FutureObj *self) /*[clinic end generated code: output=a5df276f6c1213de input=ac6effe4ba795ecc]*/ { - return create_cancelled_error(self); + asyncio_state *state = get_asyncio_state_by_def((PyObject *)self); + return create_cancelled_error(state, self); } static void FutureObj_finalize(FutureObj *fut) { - _Py_IDENTIFIER(call_exception_handler); - _Py_IDENTIFIER(message); - _Py_IDENTIFIER(exception); - _Py_IDENTIFIER(future); - _Py_IDENTIFIER(source_traceback); - PyObject *error_type, *error_value, *error_traceback; PyObject *context; PyObject *message = NULL; @@ -1437,19 +1477,19 @@ FutureObj_finalize(FutureObj *fut) goto finally; } - if (_PyDict_SetItemId(context, &PyId_message, message) < 0 || - _PyDict_SetItemId(context, &PyId_exception, fut->fut_exception) < 0 || - _PyDict_SetItemId(context, &PyId_future, (PyObject*)fut) < 0) { + if (PyDict_SetItem(context, &_Py_ID(message), message) < 0 || + PyDict_SetItem(context, &_Py_ID(exception), fut->fut_exception) < 0 || + PyDict_SetItem(context, &_Py_ID(future), (PyObject*)fut) < 0) { goto finally; } if (fut->fut_source_tb != NULL) { - if (_PyDict_SetItemId(context, &PyId_source_traceback, + if (PyDict_SetItem(context, &_Py_ID(source_traceback), fut->fut_source_tb) < 0) { goto finally; } } - func = _PyObject_GetAttrId(fut->fut_loop, &PyId_call_exception_handler); + func = PyObject_GetAttr(fut->fut_loop, &_Py_ID(call_exception_handler)); if (func != NULL) { PyObject *res = PyObject_CallOneArg(func, context); if (res == NULL) { @@ -1469,13 +1509,6 @@ FutureObj_finalize(FutureObj *fut) PyErr_Restore(error_type, error_value, error_traceback); } -static PyAsyncMethods FutureType_as_async = { - (unaryfunc)future_new_iter, /* am_await */ - 0, /* am_aiter */ - 0, /* am_anext */ - 0, /* am_send */ -}; - static PyMethodDef FutureType_methods[] = { _ASYNCIO_FUTURE_RESULT_METHODDEF _ASYNCIO_FUTURE_EXCEPTION_METHODDEF @@ -1492,6 +1525,12 @@ static PyMethodDef FutureType_methods[] = { {NULL, NULL} /* Sentinel */ }; +static PyMemberDef FutureType_members[] = { + {"__weaklistoffset__", T_PYSSIZET, offsetof(FutureObj, fut_weakreflist), READONLY}, + 
{"__dictoffset__", T_PYSSIZET, offsetof(FutureObj, dict), READONLY}, + {NULL}, +}; + #define FUTURE_COMMON_GETSETLIST \ {"_state", (getter)FutureObj_get_state, NULL, NULL}, \ {"_asyncio_future_blocking", (getter)FutureObj_get_blocking, \ @@ -1514,25 +1553,31 @@ static PyGetSetDef FutureType_getsetlist[] = { static void FutureObj_dealloc(PyObject *self); -static PyTypeObject FutureType = { - PyVarObject_HEAD_INIT(NULL, 0) - "_asyncio.Future", - sizeof(FutureObj), /* tp_basicsize */ - .tp_dealloc = FutureObj_dealloc, - .tp_as_async = &FutureType_as_async, - .tp_repr = (reprfunc)FutureObj_repr, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, - .tp_doc = _asyncio_Future___init____doc__, - .tp_traverse = (traverseproc)FutureObj_traverse, - .tp_clear = (inquiry)FutureObj_clear, - .tp_weaklistoffset = offsetof(FutureObj, fut_weakreflist), - .tp_iter = (getiterfunc)future_new_iter, - .tp_methods = FutureType_methods, - .tp_getset = FutureType_getsetlist, - .tp_dictoffset = offsetof(FutureObj, dict), - .tp_init = (initproc)_asyncio_Future___init__, - .tp_new = PyType_GenericNew, - .tp_finalize = (destructor)FutureObj_finalize, +static PyType_Slot Future_slots[] = { + {Py_tp_dealloc, FutureObj_dealloc}, + {Py_tp_repr, (reprfunc)FutureObj_repr}, + {Py_tp_doc, (void *)_asyncio_Future___init____doc__}, + {Py_tp_traverse, (traverseproc)FutureObj_traverse}, + {Py_tp_clear, (inquiry)FutureObj_clear}, + {Py_tp_iter, (getiterfunc)future_new_iter}, + {Py_tp_methods, FutureType_methods}, + {Py_tp_members, FutureType_members}, + {Py_tp_getset, FutureType_getsetlist}, + {Py_tp_init, (initproc)_asyncio_Future___init__}, + {Py_tp_new, PyType_GenericNew}, + {Py_tp_finalize, (destructor)FutureObj_finalize}, + + // async slots + {Py_am_await, (unaryfunc)future_new_iter}, + {0, NULL}, +}; + +static PyType_Spec Future_spec = { + .name = "_asyncio.Future", + .basicsize = sizeof(FutureObj), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = Future_slots, }; static void @@ -1540,16 +1585,12 @@ FutureObj_dealloc(PyObject *self) { FutureObj *fut = (FutureObj *)self; - if (Future_CheckExact(fut)) { - /* When fut is subclass of Future, finalizer is called from - * subtype_dealloc. - */ - if (PyObject_CallFinalizerFromDealloc(self) < 0) { - // resurrected. - return; - } + if (PyObject_CallFinalizerFromDealloc(self) < 0) { + // resurrected. 
+ return; } + PyTypeObject *tp = Py_TYPE(fut); PyObject_GC_UnTrack(self); if (fut->fut_weakreflist != NULL) { @@ -1557,7 +1598,8 @@ FutureObj_dealloc(PyObject *self) } (void)FutureObj_clear(fut); - Py_TYPE(fut)->tp_free(fut); + tp->tp_free(fut); + Py_DECREF(tp); } @@ -1577,8 +1619,9 @@ static Py_ssize_t fi_freelist_len = 0; static void FutureIter_dealloc(futureiterobject *it) { + PyTypeObject *tp = Py_TYPE(it); PyObject_GC_UnTrack(it); - Py_CLEAR(it->future); + tp->tp_clear((PyObject *)it); if (fi_freelist_len < FI_FREELIST_MAXLEN) { fi_freelist_len++; @@ -1587,6 +1630,7 @@ FutureIter_dealloc(futureiterobject *it) } else { PyObject_GC_Del(it); + Py_DECREF(tp); } } @@ -1608,8 +1652,7 @@ FutureIter_am_send(futureiterobject *it, if (fut->fut_state == STATE_PENDING) { if (!fut->fut_blocking) { fut->fut_blocking = 1; - Py_INCREF(fut); - *result = (PyObject *)fut; + *result = Py_NewRef(fut); return PYGEN_NEXT; } PyErr_SetString(PyExc_RuntimeError, @@ -1725,16 +1768,24 @@ FutureIter_throw(futureiterobject *self, PyObject *const *args, Py_ssize_t nargs return NULL; } +static int +FutureIter_clear(futureiterobject *it) +{ + Py_CLEAR(it->future); + return 0; +} + static PyObject * FutureIter_close(futureiterobject *self, PyObject *arg) { - Py_CLEAR(self->future); + (void)FutureIter_clear(self); Py_RETURN_NONE; } static int FutureIter_traverse(futureiterobject *it, visitproc visit, void *arg) { + Py_VISIT(Py_TYPE(it)); Py_VISIT(it->future); return 0; } @@ -1746,27 +1797,26 @@ static PyMethodDef FutureIter_methods[] = { {NULL, NULL} /* Sentinel */ }; -static PyAsyncMethods FutureIterType_as_async = { - 0, /* am_await */ - 0, /* am_aiter */ - 0, /* am_anext */ - (sendfunc)FutureIter_am_send, /* am_send */ +static PyType_Slot FutureIter_slots[] = { + {Py_tp_dealloc, (destructor)FutureIter_dealloc}, + {Py_tp_getattro, PyObject_GenericGetAttr}, + {Py_tp_traverse, (traverseproc)FutureIter_traverse}, + {Py_tp_clear, FutureIter_clear}, + {Py_tp_iter, PyObject_SelfIter}, + {Py_tp_iternext, (iternextfunc)FutureIter_iternext}, + {Py_tp_methods, FutureIter_methods}, + + // async methods + {Py_am_send, (sendfunc)FutureIter_am_send}, + {0, NULL}, }; - -static PyTypeObject FutureIterType = { - PyVarObject_HEAD_INIT(NULL, 0) - "_asyncio.FutureIter", - .tp_basicsize = sizeof(futureiterobject), - .tp_itemsize = 0, - .tp_dealloc = (destructor)FutureIter_dealloc, - .tp_as_async = &FutureIterType_as_async, - .tp_getattro = PyObject_GenericGetAttr, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, - .tp_traverse = (traverseproc)FutureIter_traverse, - .tp_iter = PyObject_SelfIter, - .tp_iternext = (iternextfunc)FutureIter_iternext, - .tp_methods = FutureIter_methods, +static PyType_Spec FutureIter_spec = { + .name = "_asyncio.FutureIter", + .basicsize = sizeof(futureiterobject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = FutureIter_slots, }; static PyObject * @@ -1774,12 +1824,13 @@ future_new_iter(PyObject *fut) { futureiterobject *it; - if (!PyObject_TypeCheck(fut, &FutureType)) { + asyncio_state *state = get_asyncio_state_by_def((PyObject *)fut); + if (!Future_Check(state, fut)) { PyErr_BadInternalCall(); return NULL; } - ENSURE_FUTURE_ALIVE(fut) + ENSURE_FUTURE_ALIVE(state, fut) if (fi_freelist_len) { fi_freelist_len--; @@ -1789,14 +1840,13 @@ future_new_iter(PyObject *fut) _Py_NewReference((PyObject*) it); } else { - it = PyObject_GC_New(futureiterobject, &FutureIterType); + it = PyObject_GC_New(futureiterobject, state->FutureIterType); if (it == NULL) { return 
NULL; } } - Py_INCREF(fut); - it->future = (FutureObj*)fut; + it->future = (FutureObj*)Py_NewRef(fut); PyObject_GC_Track(it); return (PyObject*)it; } @@ -1810,9 +1860,9 @@ class _asyncio.Task "TaskObj *" "&Task_Type" [clinic start generated code]*/ /*[clinic end generated code: output=da39a3ee5e6b4b0d input=719dcef0fcc03b37]*/ -static int task_call_step_soon(TaskObj *, PyObject *); +static int task_call_step_soon(asyncio_state *state, TaskObj *, PyObject *); static PyObject * task_wakeup(TaskObj *, PyObject *); -static PyObject * task_step(TaskObj *, PyObject *); +static PyObject * task_step(asyncio_state *, TaskObj *, PyObject *); /* ----- Task._step wrapper */ @@ -1827,9 +1877,11 @@ TaskStepMethWrapper_clear(TaskStepMethWrapper *o) static void TaskStepMethWrapper_dealloc(TaskStepMethWrapper *o) { + PyTypeObject *tp = Py_TYPE(o); PyObject_GC_UnTrack(o); (void)TaskStepMethWrapper_clear(o); Py_TYPE(o)->tp_free(o); + Py_DECREF(tp); } static PyObject * @@ -1844,13 +1896,15 @@ TaskStepMethWrapper_call(TaskStepMethWrapper *o, PyErr_SetString(PyExc_TypeError, "function takes no positional arguments"); return NULL; } - return task_step(o->sw_task, o->sw_arg); + asyncio_state *state = get_asyncio_state_by_def((PyObject *)o); + return task_step(state, o->sw_task, o->sw_arg); } static int TaskStepMethWrapper_traverse(TaskStepMethWrapper *o, visitproc visit, void *arg) { + Py_VISIT(Py_TYPE(o)); Py_VISIT(o->sw_task); Py_VISIT(o->sw_arg); return 0; @@ -1860,8 +1914,7 @@ static PyObject * TaskStepMethWrapper_get___self__(TaskStepMethWrapper *o, void *Py_UNUSED(ignored)) { if (o->sw_task) { - Py_INCREF(o->sw_task); - return (PyObject*)o->sw_task; + return Py_NewRef(o->sw_task); } Py_RETURN_NONE; } @@ -1871,34 +1924,36 @@ static PyGetSetDef TaskStepMethWrapper_getsetlist[] = { {NULL} /* Sentinel */ }; -static PyTypeObject TaskStepMethWrapper_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "TaskStepMethWrapper", - .tp_basicsize = sizeof(TaskStepMethWrapper), - .tp_itemsize = 0, - .tp_getset = TaskStepMethWrapper_getsetlist, - .tp_dealloc = (destructor)TaskStepMethWrapper_dealloc, - .tp_call = (ternaryfunc)TaskStepMethWrapper_call, - .tp_getattro = PyObject_GenericGetAttr, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, - .tp_traverse = (traverseproc)TaskStepMethWrapper_traverse, - .tp_clear = (inquiry)TaskStepMethWrapper_clear, +static PyType_Slot TaskStepMethWrapper_slots[] = { + {Py_tp_getset, TaskStepMethWrapper_getsetlist}, + {Py_tp_dealloc, (destructor)TaskStepMethWrapper_dealloc}, + {Py_tp_call, (ternaryfunc)TaskStepMethWrapper_call}, + {Py_tp_getattro, PyObject_GenericGetAttr}, + {Py_tp_traverse, (traverseproc)TaskStepMethWrapper_traverse}, + {Py_tp_clear, (inquiry)TaskStepMethWrapper_clear}, + {0, NULL}, +}; + +static PyType_Spec TaskStepMethWrapper_spec = { + .name = "_asyncio.TaskStepMethWrapper", + .basicsize = sizeof(TaskStepMethWrapper), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = TaskStepMethWrapper_slots, }; static PyObject * TaskStepMethWrapper_new(TaskObj *task, PyObject *arg) { + asyncio_state *state = get_asyncio_state_by_def((PyObject *)task); TaskStepMethWrapper *o; - o = PyObject_GC_New(TaskStepMethWrapper, &TaskStepMethWrapper_Type); + o = PyObject_GC_New(TaskStepMethWrapper, state->TaskStepMethWrapper_Type); if (o == NULL) { return NULL; } - Py_INCREF(task); - o->sw_task = task; - - Py_XINCREF(arg); - o->sw_arg = arg; + o->sw_task = (TaskObj*)Py_NewRef(task); + o->sw_arg = Py_XNewRef(arg); PyObject_GC_Track(o); return (PyObject*) 
o; @@ -1916,12 +1971,10 @@ static PyMethodDef TaskWakeupDef = { /* ----- Task introspection helpers */ static int -register_task(PyObject *task) +register_task(asyncio_state *state, PyObject *task) { - _Py_IDENTIFIER(add); - - PyObject *res = _PyObject_CallMethodIdOneArg(all_tasks, - &PyId_add, task); + PyObject *res = PyObject_CallMethodOneArg(state->all_tasks, + &_Py_ID(add), task); if (res == NULL) { return -1; } @@ -1931,12 +1984,10 @@ register_task(PyObject *task) static int -unregister_task(PyObject *task) +unregister_task(asyncio_state *state, PyObject *task) { - _Py_IDENTIFIER(discard); - - PyObject *res = _PyObject_CallMethodIdOneArg(all_tasks, - &PyId_discard, task); + PyObject *res = PyObject_CallMethodOneArg(state->all_tasks, + &_Py_ID(discard), task); if (res == NULL) { return -1; } @@ -1946,7 +1997,7 @@ unregister_task(PyObject *task) static int -enter_task(PyObject *loop, PyObject *task) +enter_task(asyncio_state *state, PyObject *loop, PyObject *task) { PyObject *item; Py_hash_t hash; @@ -1954,7 +2005,7 @@ enter_task(PyObject *loop, PyObject *task) if (hash == -1) { return -1; } - item = _PyDict_GetItem_KnownHash(current_tasks, loop, hash); + item = _PyDict_GetItem_KnownHash(state->current_tasks, loop, hash); if (item != NULL) { Py_INCREF(item); PyErr_Format( @@ -1968,12 +2019,12 @@ enter_task(PyObject *loop, PyObject *task) if (PyErr_Occurred()) { return -1; } - return _PyDict_SetItem_KnownHash(current_tasks, loop, task, hash); + return _PyDict_SetItem_KnownHash(state->current_tasks, loop, task, hash); } static int -leave_task(PyObject *loop, PyObject *task) +leave_task(asyncio_state *state, PyObject *loop, PyObject *task) /*[clinic end generated code: output=0ebf6db4b858fb41 input=51296a46313d1ad8]*/ { PyObject *item; @@ -1982,7 +2033,7 @@ leave_task(PyObject *loop, PyObject *task) if (hash == -1) { return -1; } - item = _PyDict_GetItem_KnownHash(current_tasks, loop, hash); + item = _PyDict_GetItem_KnownHash(state->current_tasks, loop, hash); if (item != task) { if (item == NULL) { /* Not entered, replace with None */ @@ -1994,7 +2045,7 @@ leave_task(PyObject *loop, PyObject *task) task, item, NULL); return -1; } - return _PyDict_DelItem_KnownHash(current_tasks, loop, hash); + return _PyDict_DelItem_KnownHash(state->current_tasks, loop, hash); } /* ----- Task */ @@ -2021,7 +2072,8 @@ _asyncio_Task___init___impl(TaskObj *self, PyObject *coro, PyObject *loop, return -1; } - int is_coro = is_coroutine(coro); + asyncio_state *state = get_asyncio_state_by_def((PyObject *)self); + int is_coro = is_coroutine(state, coro); if (is_coro == -1) { return -1; } @@ -2050,7 +2102,8 @@ _asyncio_Task___init___impl(TaskObj *self, PyObject *coro, PyObject *loop, Py_XSETREF(self->task_coro, coro); if (name == Py_None) { - name = PyUnicode_FromFormat("Task-%" PRIu64, ++task_name_counter); + name = PyUnicode_FromFormat("Task-%" PRIu64, + ++state->task_name_counter); } else if (!PyUnicode_CheckExact(name)) { name = PyObject_Str(name); } else { @@ -2061,10 +2114,10 @@ _asyncio_Task___init___impl(TaskObj *self, PyObject *coro, PyObject *loop, return -1; } - if (task_call_step_soon(self, NULL)) { + if (task_call_step_soon(state, self, NULL)) { return -1; } - return register_task((PyObject*)self); + return register_task(state, (PyObject*)self); } static int @@ -2081,11 +2134,23 @@ TaskObj_clear(TaskObj *task) static int TaskObj_traverse(TaskObj *task, visitproc visit, void *arg) { + Py_VISIT(Py_TYPE(task)); Py_VISIT(task->task_context); Py_VISIT(task->task_coro); Py_VISIT(task->task_name); 
Py_VISIT(task->task_fut_waiter); - (void)FutureObj_traverse((FutureObj*) task, visit, arg); + FutureObj *fut = (FutureObj *)task; + Py_VISIT(fut->fut_loop); + Py_VISIT(fut->fut_callback0); + Py_VISIT(fut->fut_context0); + Py_VISIT(fut->fut_callbacks); + Py_VISIT(fut->fut_result); + Py_VISIT(fut->fut_exception); + Py_VISIT(fut->fut_exception_tb); + Py_VISIT(fut->fut_source_tb); + Py_VISIT(fut->fut_cancel_msg); + Py_VISIT(fut->fut_cancelled_exc); + Py_VISIT(fut->dict); return 0; } @@ -2130,8 +2195,7 @@ static PyObject * TaskObj_get_coro(TaskObj *task, void *Py_UNUSED(ignored)) { if (task->task_coro) { - Py_INCREF(task->task_coro); - return task->task_coro; + return Py_NewRef(task->task_coro); } Py_RETURN_NONE; @@ -2141,8 +2205,7 @@ static PyObject * TaskObj_get_fut_waiter(TaskObj *task, void *Py_UNUSED(ignored)) { if (task->task_fut_waiter) { - Py_INCREF(task->task_fut_waiter); - return task->task_fut_waiter; + return Py_NewRef(task->task_fut_waiter); } Py_RETURN_NONE; @@ -2151,7 +2214,9 @@ TaskObj_get_fut_waiter(TaskObj *task, void *Py_UNUSED(ignored)) static PyObject * TaskObj_repr(TaskObj *task) { - return PyObject_CallOneArg(asyncio_task_repr_func, (PyObject *)task); + asyncio_state *state = get_asyncio_state_by_def((PyObject *)task); + return PyObject_CallOneArg(state->asyncio_task_repr_func, + (PyObject *)task); } @@ -2223,8 +2288,8 @@ _asyncio_Task_cancel_impl(TaskObj *self, PyObject *msg) PyObject *res; int is_true; - res = _PyObject_CallMethodIdOneArg(self->task_fut_waiter, - &PyId_cancel, msg); + res = PyObject_CallMethodOneArg(self->task_fut_waiter, + &_Py_ID(cancel), msg); if (res == NULL) { return NULL; } @@ -2288,6 +2353,8 @@ _asyncio_Task_uncancel_impl(TaskObj *self) /*[clinic input] _asyncio.Task.get_stack + cls: defining_class + / * limit: object = None @@ -2313,16 +2380,20 @@ returned for a suspended coroutine. [clinic start generated code]*/ static PyObject * -_asyncio_Task_get_stack_impl(TaskObj *self, PyObject *limit) -/*[clinic end generated code: output=c9aeeeebd1e18118 input=05b323d42b809b90]*/ +_asyncio_Task_get_stack_impl(TaskObj *self, PyTypeObject *cls, + PyObject *limit) +/*[clinic end generated code: output=6774dfc10d3857fa input=8e01c9b2618ae953]*/ { + asyncio_state *state = get_asyncio_state_by_cls(cls); return PyObject_CallFunctionObjArgs( - asyncio_task_get_stack_func, self, limit, NULL); + state->asyncio_task_get_stack_func, self, limit, NULL); } /*[clinic input] _asyncio.Task.print_stack + cls: defining_class + / * limit: object = None file: object = None @@ -2337,12 +2408,13 @@ to sys.stderr. 
[clinic start generated code]*/ static PyObject * -_asyncio_Task_print_stack_impl(TaskObj *self, PyObject *limit, - PyObject *file) -/*[clinic end generated code: output=7339e10314cd3f4d input=1a0352913b7fcd92]*/ +_asyncio_Task_print_stack_impl(TaskObj *self, PyTypeObject *cls, + PyObject *limit, PyObject *file) +/*[clinic end generated code: output=b38affe9289ec826 input=150b35ba2d3a7dee]*/ { + asyncio_state *state = get_asyncio_state_by_cls(cls); return PyObject_CallFunctionObjArgs( - asyncio_task_print_stack_func, self, limit, file, NULL); + state->asyncio_task_print_stack_func, self, limit, file, NULL); } /*[clinic input] @@ -2385,8 +2457,7 @@ static PyObject * _asyncio_Task_get_coro_impl(TaskObj *self) /*[clinic end generated code: output=bcac27c8cc6c8073 input=d2e8606c42a7b403]*/ { - Py_INCREF(self->task_coro); - return self->task_coro; + return Py_NewRef(self->task_coro); } /*[clinic input] @@ -2397,8 +2468,7 @@ static PyObject * _asyncio_Task_get_context_impl(TaskObj *self) /*[clinic end generated code: output=6996f53d3dc01aef input=87c0b209b8fceeeb]*/ { - Py_INCREF(self->task_context); - return self->task_context; + return Py_NewRef(self->task_context); } /*[clinic input] @@ -2410,8 +2480,7 @@ _asyncio_Task_get_name_impl(TaskObj *self) /*[clinic end generated code: output=0ecf1570c3b37a8f input=a4a6595d12f4f0f8]*/ { if (self->task_name) { - Py_INCREF(self->task_name); - return self->task_name; + return Py_NewRef(self->task_name); } Py_RETURN_NONE; @@ -2444,11 +2513,6 @@ _asyncio_Task_set_name(TaskObj *self, PyObject *value) static void TaskObj_finalize(TaskObj *task) { - _Py_IDENTIFIER(call_exception_handler); - _Py_IDENTIFIER(task); - _Py_IDENTIFIER(message); - _Py_IDENTIFIER(source_traceback); - PyObject *context; PyObject *message = NULL; PyObject *func; @@ -2471,21 +2535,21 @@ TaskObj_finalize(TaskObj *task) goto finally; } - if (_PyDict_SetItemId(context, &PyId_message, message) < 0 || - _PyDict_SetItemId(context, &PyId_task, (PyObject*)task) < 0) + if (PyDict_SetItem(context, &_Py_ID(message), message) < 0 || + PyDict_SetItem(context, &_Py_ID(task), (PyObject*)task) < 0) { goto finally; } if (task->task_source_tb != NULL) { - if (_PyDict_SetItemId(context, &PyId_source_traceback, + if (PyDict_SetItem(context, &_Py_ID(source_traceback), task->task_source_tb) < 0) { goto finally; } } - func = _PyObject_GetAttrId(task->task_loop, &PyId_call_exception_handler); + func = PyObject_GetAttr(task->task_loop, &_Py_ID(call_exception_handler)); if (func != NULL) { PyObject *res = PyObject_CallOneArg(func, context); if (res == NULL) { @@ -2533,6 +2597,12 @@ static PyMethodDef TaskType_methods[] = { {NULL, NULL} /* Sentinel */ }; +static PyMemberDef TaskType_members[] = { + {"__weaklistoffset__", T_PYSSIZET, offsetof(TaskObj, task_weakreflist), READONLY}, + {"__dictoffset__", T_PYSSIZET, offsetof(TaskObj, dict), READONLY}, + {NULL}, +}; + static PyGetSetDef TaskType_getsetlist[] = { FUTURE_COMMON_GETSETLIST {"_log_destroy_pending", (getter)TaskObj_get_log_destroy_pending, @@ -2543,26 +2613,31 @@ static PyGetSetDef TaskType_getsetlist[] = { {NULL} /* Sentinel */ }; -static PyTypeObject TaskType = { - PyVarObject_HEAD_INIT(NULL, 0) - "_asyncio.Task", - sizeof(TaskObj), /* tp_basicsize */ - .tp_base = &FutureType, - .tp_dealloc = TaskObj_dealloc, - .tp_as_async = &FutureType_as_async, - .tp_repr = (reprfunc)TaskObj_repr, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, - .tp_doc = _asyncio_Task___init____doc__, - .tp_traverse = (traverseproc)TaskObj_traverse, - 
.tp_clear = (inquiry)TaskObj_clear, - .tp_weaklistoffset = offsetof(TaskObj, task_weakreflist), - .tp_iter = (getiterfunc)future_new_iter, - .tp_methods = TaskType_methods, - .tp_getset = TaskType_getsetlist, - .tp_dictoffset = offsetof(TaskObj, dict), - .tp_init = (initproc)_asyncio_Task___init__, - .tp_new = PyType_GenericNew, - .tp_finalize = (destructor)TaskObj_finalize, +static PyType_Slot Task_slots[] = { + {Py_tp_dealloc, TaskObj_dealloc}, + {Py_tp_repr, (reprfunc)TaskObj_repr}, + {Py_tp_doc, (void *)_asyncio_Task___init____doc__}, + {Py_tp_traverse, (traverseproc)TaskObj_traverse}, + {Py_tp_clear, (inquiry)TaskObj_clear}, + {Py_tp_iter, (getiterfunc)future_new_iter}, + {Py_tp_methods, TaskType_methods}, + {Py_tp_members, TaskType_members}, + {Py_tp_getset, TaskType_getsetlist}, + {Py_tp_init, (initproc)_asyncio_Task___init__}, + {Py_tp_new, PyType_GenericNew}, + {Py_tp_finalize, (destructor)TaskObj_finalize}, + + // async slots + {Py_am_await, (unaryfunc)future_new_iter}, + {0, NULL}, +}; + +static PyType_Spec Task_spec = { + .name = "_asyncio.Task", + .basicsize = sizeof(TaskObj), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = Task_slots, }; static void @@ -2570,16 +2645,12 @@ TaskObj_dealloc(PyObject *self) { TaskObj *task = (TaskObj *)self; - if (Task_CheckExact(self)) { - /* When fut is subclass of Task, finalizer is called from - * subtype_dealloc. - */ - if (PyObject_CallFinalizerFromDealloc(self) < 0) { - // resurrected. - return; - } + if (PyObject_CallFinalizerFromDealloc(self) < 0) { + // resurrected. + return; } + PyTypeObject *tp = Py_TYPE(task); PyObject_GC_UnTrack(self); if (task->task_weakreflist != NULL) { @@ -2587,24 +2658,26 @@ TaskObj_dealloc(PyObject *self) } (void)TaskObj_clear(task); - Py_TYPE(task)->tp_free(task); + tp->tp_free(task); + Py_DECREF(tp); } static int -task_call_step_soon(TaskObj *task, PyObject *arg) +task_call_step_soon(asyncio_state *state, TaskObj *task, PyObject *arg) { PyObject *cb = TaskStepMethWrapper_new(task, arg); if (cb == NULL) { return -1; } - int ret = call_soon(task->task_loop, cb, NULL, task->task_context); + int ret = call_soon(state, task->task_loop, cb, NULL, task->task_context); Py_DECREF(cb); return ret; } static PyObject * -task_set_error_soon(TaskObj *task, PyObject *et, const char *format, ...) +task_set_error_soon(asyncio_state *state, TaskObj *task, PyObject *et, + const char *format, ...) { PyObject* msg; @@ -2623,7 +2696,7 @@ task_set_error_soon(TaskObj *task, PyObject *et, const char *format, ...) return NULL; } - if (task_call_step_soon(task, e) == -1) { + if (task_call_step_soon(state, task, e) == -1) { Py_DECREF(e); return NULL; } @@ -2647,7 +2720,7 @@ gen_status_from_result(PyObject **result) } static PyObject * -task_step_impl(TaskObj *task, PyObject *exc) +task_step_impl(asyncio_state *state, TaskObj *task, PyObject *exc) { int res; int clear_exc = 0; @@ -2656,7 +2729,7 @@ task_step_impl(TaskObj *task, PyObject *exc) PyObject *o; if (task->task_state != STATE_PENDING) { - PyErr_Format(asyncio_InvalidStateError, + PyErr_Format(state->asyncio_InvalidStateError, "_step(): already done: %R %R", task, exc ? 
exc : Py_None); @@ -2668,7 +2741,7 @@ task_step_impl(TaskObj *task, PyObject *exc) if (exc) { /* Check if exc is a CancelledError */ - res = PyObject_IsInstance(exc, asyncio_CancelledError); + res = PyObject_IsInstance(exc, state->asyncio_CancelledError); if (res == -1) { /* An error occurred, abort */ goto fail; @@ -2681,7 +2754,7 @@ task_step_impl(TaskObj *task, PyObject *exc) if (!exc) { /* exc was not a CancelledError */ - exc = create_cancelled_error((FutureObj*)task); + exc = create_cancelled_error(state, (FutureObj*)task); if (!exc) { goto fail; @@ -2709,7 +2782,7 @@ task_step_impl(TaskObj *task, PyObject *exc) gen_status = PyIter_Send(coro, Py_None, &result); } else { - result = _PyObject_CallMethodIdOneArg(coro, &PyId_throw, exc); + result = PyObject_CallMethodOneArg(coro, &_Py_ID(throw), exc); gen_status = gen_status_from_result(&result); if (clear_exc) { /* We created 'exc' during this call */ @@ -2728,10 +2801,11 @@ task_step_impl(TaskObj *task, PyObject *exc) if (task->task_must_cancel) { // Task is cancelled right before coro stops. task->task_must_cancel = 0; - tmp = future_cancel((FutureObj*)task, task->task_cancel_msg); + tmp = future_cancel(state, (FutureObj*)task, + task->task_cancel_msg); } else { - tmp = future_set_result((FutureObj*)task, result); + tmp = future_set_result(state, (FutureObj*)task, result); } Py_DECREF(result); @@ -2743,7 +2817,7 @@ task_step_impl(TaskObj *task, PyObject *exc) Py_RETURN_NONE; } - if (PyErr_ExceptionMatches(asyncio_CancelledError)) { + if (PyErr_ExceptionMatches(state->asyncio_CancelledError)) { /* CancelledError */ PyErr_Fetch(&et, &ev, &tb); assert(et); @@ -2758,7 +2832,7 @@ task_step_impl(TaskObj *task, PyObject *exc) /* transfer ownership */ fut->fut_cancelled_exc = ev; - return future_cancel(fut, NULL); + return future_cancel(state, fut, NULL); } /* Some other exception; pop it and call Task.set_exception() */ @@ -2769,7 +2843,7 @@ task_step_impl(TaskObj *task, PyObject *exc) PyException_SetTraceback(ev, tb); } - o = future_set_exception((FutureObj*)task, ev); + o = future_set_exception(state, (FutureObj*)task, ev); if (!o) { /* An exception in Task.set_exception() */ Py_DECREF(et); @@ -2801,7 +2875,7 @@ task_step_impl(TaskObj *task, PyObject *exc) } /* Check if `result` is FutureObj or TaskObj (and not a subclass) */ - if (Future_CheckExact(result) || Task_CheckExact(result)) { + if (Future_CheckExact(state, result) || Task_CheckExact(state, result)) { PyObject *wrapper; PyObject *tmp; FutureObj *fut = (FutureObj*)result; @@ -2822,7 +2896,7 @@ task_step_impl(TaskObj *task, PyObject *exc) if (wrapper == NULL) { goto fail; } - tmp = future_add_done_callback( + tmp = future_add_done_callback(state, (FutureObj*)result, wrapper, task->task_context); Py_DECREF(wrapper); if (tmp == NULL) { @@ -2836,7 +2910,7 @@ task_step_impl(TaskObj *task, PyObject *exc) if (task->task_must_cancel) { PyObject *r; int is_true; - r = _PyObject_CallMethodIdOneArg(result, &PyId_cancel, + r = PyObject_CallMethodOneArg(result, &_Py_ID(cancel), task->task_cancel_msg); if (r == NULL) { return NULL; @@ -2857,14 +2931,14 @@ task_step_impl(TaskObj *task, PyObject *exc) /* Check if `result` is None */ if (result == Py_None) { /* Bare yield relinquishes control for one event loop iteration. 
*/ - if (task_call_step_soon(task, NULL)) { + if (task_call_step_soon(state, task, NULL)) { goto fail; } return result; } /* Check if `result` is a Future-compatible object */ - if (_PyObject_LookupAttrId(result, &PyId__asyncio_future_blocking, &o) < 0) { + if (_PyObject_LookupAttr(result, &_Py_ID(_asyncio_future_blocking), &o) < 0) { goto fail; } if (o != NULL && o != Py_None) { @@ -2879,7 +2953,7 @@ task_step_impl(TaskObj *task, PyObject *exc) } /* Check if `result` future is attached to a different loop */ - PyObject *oloop = get_future_loop(result); + PyObject *oloop = get_future_loop(state, result); if (oloop == NULL) { goto fail; } @@ -2894,8 +2968,8 @@ task_step_impl(TaskObj *task, PyObject *exc) } /* result._asyncio_future_blocking = False */ - if (_PyObject_SetAttrId( - result, &PyId__asyncio_future_blocking, Py_False) == -1) { + if (PyObject_SetAttr( + result, &_Py_ID(_asyncio_future_blocking), Py_False) == -1) { goto fail; } @@ -2905,8 +2979,8 @@ task_step_impl(TaskObj *task, PyObject *exc) } /* result.add_done_callback(task._wakeup) */ - PyObject *add_cb = _PyObject_GetAttrId( - result, &PyId_add_done_callback); + PyObject *add_cb = PyObject_GetAttr( + result, &_Py_ID(add_done_callback)); if (add_cb == NULL) { Py_DECREF(wrapper); goto fail; @@ -2915,7 +2989,7 @@ task_step_impl(TaskObj *task, PyObject *exc) stack[0] = wrapper; stack[1] = (PyObject *)task->task_context; EVAL_CALL_STAT_INC_IF_FUNCTION(EVAL_CALL_API, add_cb); - tmp = PyObject_Vectorcall(add_cb, stack, 1, context_kwname); + tmp = PyObject_Vectorcall(add_cb, stack, 1, state->context_kwname); Py_DECREF(add_cb); Py_DECREF(wrapper); if (tmp == NULL) { @@ -2929,7 +3003,7 @@ task_step_impl(TaskObj *task, PyObject *exc) if (task->task_must_cancel) { PyObject *r; int is_true; - r = _PyObject_CallMethodIdOneArg(result, &PyId_cancel, + r = PyObject_CallMethodOneArg(result, &_Py_ID(cancel), task->task_cancel_msg); if (r == NULL) { return NULL; @@ -2956,7 +3030,7 @@ task_step_impl(TaskObj *task, PyObject *exc) if (res) { /* `result` is a generator */ o = task_set_error_soon( - task, PyExc_RuntimeError, + state, task, PyExc_RuntimeError, "yield was used instead of yield from for " "generator in task %R with %R", task, result); Py_DECREF(result); @@ -2965,20 +3039,20 @@ task_step_impl(TaskObj *task, PyObject *exc) /* The `result` is none of the above */ o = task_set_error_soon( - task, PyExc_RuntimeError, "Task got bad yield: %R", result); + state, task, PyExc_RuntimeError, "Task got bad yield: %R", result); Py_DECREF(result); return o; self_await: o = task_set_error_soon( - task, PyExc_RuntimeError, + state, task, PyExc_RuntimeError, "Task cannot await on itself: %R", task); Py_DECREF(result); return o; yield_insteadof_yf: o = task_set_error_soon( - task, PyExc_RuntimeError, + state, task, PyExc_RuntimeError, "yield was used instead of yield from " "in task %R with %R", task, result); @@ -2987,7 +3061,7 @@ task_step_impl(TaskObj *task, PyObject *exc) different_loop: o = task_set_error_soon( - task, PyExc_RuntimeError, + state, task, PyExc_RuntimeError, "Task %R got Future %R attached to a different loop", task, result); Py_DECREF(result); @@ -2999,25 +3073,25 @@ task_step_impl(TaskObj *task, PyObject *exc) } static PyObject * -task_step(TaskObj *task, PyObject *exc) +task_step(asyncio_state *state, TaskObj *task, PyObject *exc) { PyObject *res; - if (enter_task(task->task_loop, (PyObject*)task) < 0) { + if (enter_task(state, task->task_loop, (PyObject*)task) < 0) { return NULL; } - res = task_step_impl(task, exc); + res = 
task_step_impl(state, task, exc); if (res == NULL) { PyObject *et, *ev, *tb; PyErr_Fetch(&et, &ev, &tb); - leave_task(task->task_loop, (PyObject*)task); + leave_task(state, task->task_loop, (PyObject*)task); _PyErr_ChainExceptions(et, ev, tb); /* Normalizes (et, ev, tb) */ return NULL; } else { - if (leave_task(task->task_loop, (PyObject*)task) < 0) { + if (leave_task(state, task->task_loop, (PyObject*)task) < 0) { Py_DECREF(res); return NULL; } @@ -3034,9 +3108,10 @@ task_wakeup(TaskObj *task, PyObject *o) PyObject *result; assert(o); - if (Future_CheckExact(o) || Task_CheckExact(o)) { + asyncio_state *state = get_asyncio_state_by_def((PyObject *)task); + if (Future_CheckExact(state, o) || Task_CheckExact(state, o)) { PyObject *fut_result = NULL; - int res = future_get_result((FutureObj*)o, &fut_result); + int res = future_get_result(state, (FutureObj*)o, &fut_result); switch(res) { case -1: @@ -3044,10 +3119,10 @@ task_wakeup(TaskObj *task, PyObject *o) break; /* exception raised */ case 0: Py_DECREF(fut_result); - return task_step(task, NULL); + return task_step(state, task, NULL); default: assert(res == 1); - result = task_step(task, fut_result); + result = task_step(state, task, fut_result); Py_DECREF(fut_result); return result; } @@ -3056,7 +3131,7 @@ task_wakeup(TaskObj *task, PyObject *o) PyObject *fut_result = PyObject_CallMethod(o, "result", NULL); if (fut_result != NULL) { Py_DECREF(fut_result); - return task_step(task, NULL); + return task_step(state, task, NULL); } /* exception raised */ } @@ -3068,7 +3143,7 @@ task_wakeup(TaskObj *task, PyObject *o) PyException_SetTraceback(ev, tb); } - result = task_step(task, ev); + result = task_step(state, task, ev); Py_DECREF(et); Py_XDECREF(tb); @@ -3096,7 +3171,8 @@ _asyncio__get_running_loop_impl(PyObject *module) /*[clinic end generated code: output=b4390af721411a0a input=0a21627e25a4bd43]*/ { PyObject *loop; - if (get_running_loop(&loop)) { + asyncio_state *state = get_asyncio_state(module); + if (get_running_loop(state, &loop)) { return NULL; } if (loop == NULL) { @@ -3121,7 +3197,8 @@ static PyObject * _asyncio__set_running_loop(PyObject *module, PyObject *loop) /*[clinic end generated code: output=ae56bf7a28ca189a input=4c9720233d606604]*/ { - if (set_running_loop(loop)) { + asyncio_state *state = get_asyncio_state(module); + if (set_running_loop(state, loop)) { return NULL; } Py_RETURN_NONE; @@ -3144,19 +3221,8 @@ static PyObject * _asyncio_get_event_loop_impl(PyObject *module) /*[clinic end generated code: output=2a2d8b2f824c648b input=9364bf2916c8655d]*/ { - return get_event_loop(1); -} - -/*[clinic input] -_asyncio._get_event_loop - stacklevel: int = 3 -[clinic start generated code]*/ - -static PyObject * -_asyncio__get_event_loop_impl(PyObject *module, int stacklevel) -/*[clinic end generated code: output=9c1d6d3c802e67c9 input=d17aebbd686f711d]*/ -{ - return get_event_loop(stacklevel-1); + asyncio_state *state = get_asyncio_state(module); + return get_event_loop(state); } /*[clinic input] @@ -3172,7 +3238,8 @@ _asyncio_get_running_loop_impl(PyObject *module) /*[clinic end generated code: output=c247b5f9e529530e input=2a3bf02ba39f173d]*/ { PyObject *loop; - if (get_running_loop(&loop)) { + asyncio_state *state = get_asyncio_state(module); + if (get_running_loop(state, &loop)) { return NULL; } if (loop == NULL) { @@ -3197,7 +3264,8 @@ static PyObject * _asyncio__register_task_impl(PyObject *module, PyObject *task) /*[clinic end generated code: output=8672dadd69a7d4e2 input=21075aaea14dfbad]*/ { - if (register_task(task) < 0) 
{ + asyncio_state *state = get_asyncio_state(module); + if (register_task(state, task) < 0) { return NULL; } Py_RETURN_NONE; @@ -3218,7 +3286,8 @@ static PyObject * _asyncio__unregister_task_impl(PyObject *module, PyObject *task) /*[clinic end generated code: output=6e5585706d568a46 input=28fb98c3975f7bdc]*/ { - if (unregister_task(task) < 0) { + asyncio_state *state = get_asyncio_state(module); + if (unregister_task(state, task) < 0) { return NULL; } Py_RETURN_NONE; @@ -3242,7 +3311,8 @@ static PyObject * _asyncio__enter_task_impl(PyObject *module, PyObject *loop, PyObject *task) /*[clinic end generated code: output=a22611c858035b73 input=de1b06dca70d8737]*/ { - if (enter_task(loop, task) < 0) { + asyncio_state *state = get_asyncio_state(module); + if (enter_task(state, loop, task) < 0) { return NULL; } Py_RETURN_NONE; @@ -3266,7 +3336,8 @@ static PyObject * _asyncio__leave_task_impl(PyObject *module, PyObject *loop, PyObject *task) /*[clinic end generated code: output=0ebf6db4b858fb41 input=51296a46313d1ad8]*/ { - if (leave_task(loop, task) < 0) { + asyncio_state *state = get_asyncio_state(module); + if (leave_task(state, loop, task) < 0) { return NULL; } Py_RETURN_NONE; @@ -3277,10 +3348,10 @@ _asyncio__leave_task_impl(PyObject *module, PyObject *loop, PyObject *task) static PyRunningLoopHolder * -new_running_loop_holder(PyObject *loop) +new_running_loop_holder(asyncio_state *state, PyObject *loop) { - PyRunningLoopHolder *rl = PyObject_New( - PyRunningLoopHolder, &PyRunningLoopHolder_Type); + PyRunningLoopHolder *rl = PyObject_GC_New( + PyRunningLoopHolder, state->PyRunningLoopHolder_Type); if (rl == NULL) { return NULL; } @@ -3288,32 +3359,61 @@ new_running_loop_holder(PyObject *loop) #if defined(HAVE_GETPID) && !defined(MS_WINDOWS) rl->rl_pid = getpid(); #endif + rl->rl_loop = Py_NewRef(loop); - Py_INCREF(loop); - rl->rl_loop = loop; - + PyObject_GC_Track(rl); return rl; } +static int +PyRunningLoopHolder_clear(PyRunningLoopHolder *rl) +{ + Py_CLEAR(rl->rl_loop); + return 0; +} + + +static int +PyRunningLoopHolder_traverse(PyRunningLoopHolder *rl, visitproc visit, + void *arg) +{ + Py_VISIT(Py_TYPE(rl)); + Py_VISIT(rl->rl_loop); + return 0; +} + + static void PyRunningLoopHolder_tp_dealloc(PyRunningLoopHolder *rl) { - if (cached_running_holder == (PyObject *)rl) { - cached_running_holder = NULL; + asyncio_state *state = get_asyncio_state_by_def((PyObject *)rl); + if (state->cached_running_holder == (PyObject *)rl) { + state->cached_running_holder = NULL; } - Py_CLEAR(rl->rl_loop); - PyObject_Free(rl); + PyTypeObject *tp = Py_TYPE(rl); + PyObject_GC_UnTrack(rl); + PyRunningLoopHolder_clear(rl); + PyObject_GC_Del(rl); + Py_DECREF(tp); } -static PyTypeObject PyRunningLoopHolder_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_RunningLoopHolder", - sizeof(PyRunningLoopHolder), - .tp_getattro = PyObject_GenericGetAttr, - .tp_flags = Py_TPFLAGS_DEFAULT, - .tp_dealloc = (destructor)PyRunningLoopHolder_tp_dealloc, +static PyType_Slot PyRunningLoopHolder_slots[] = { + {Py_tp_getattro, PyObject_GenericGetAttr}, + {Py_tp_dealloc, (destructor)PyRunningLoopHolder_tp_dealloc}, + {Py_tp_traverse, (traverseproc)PyRunningLoopHolder_traverse}, + {Py_tp_clear, PyRunningLoopHolder_clear}, + {0, NULL}, +}; + + +static PyType_Spec PyRunningLoopHolder_spec = { + .name = "_asyncio._RunningLoopHolder", + .basicsize = sizeof(PyRunningLoopHolder), + .slots = PyRunningLoopHolder_slots, + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), }; @@ -3339,58 +3439,106 @@ 
module_free_freelists(void) fi_freelist = NULL; } +static int +module_traverse(PyObject *mod, visitproc visit, void *arg) +{ + asyncio_state *state = get_asyncio_state(mod); + + Py_VISIT(state->FutureIterType); + Py_VISIT(state->TaskStepMethWrapper_Type); + Py_VISIT(state->FutureType); + Py_VISIT(state->TaskType); + Py_VISIT(state->PyRunningLoopHolder_Type); + + Py_VISIT(state->asyncio_mod); + Py_VISIT(state->traceback_extract_stack); + Py_VISIT(state->asyncio_future_repr_func); + Py_VISIT(state->asyncio_get_event_loop_policy); + Py_VISIT(state->asyncio_iscoroutine_func); + Py_VISIT(state->asyncio_task_get_stack_func); + Py_VISIT(state->asyncio_task_print_stack_func); + Py_VISIT(state->asyncio_task_repr_func); + Py_VISIT(state->asyncio_InvalidStateError); + Py_VISIT(state->asyncio_CancelledError); + + Py_VISIT(state->all_tasks); + Py_VISIT(state->current_tasks); + Py_VISIT(state->iscoroutine_typecache); + + Py_VISIT(state->context_kwname); + + // Visit freelist. + PyObject *next = (PyObject*) fi_freelist; + while (next != NULL) { + PyObject *current = next; + Py_VISIT(current); + next = (PyObject*) ((futureiterobject*) current)->future; + } + return 0; +} -static void -module_free(void *m) +static int +module_clear(PyObject *mod) { - Py_CLEAR(asyncio_mod); - Py_CLEAR(traceback_extract_stack); - Py_CLEAR(asyncio_future_repr_func); - Py_CLEAR(asyncio_get_event_loop_policy); - Py_CLEAR(asyncio_iscoroutine_func); - Py_CLEAR(asyncio_task_get_stack_func); - Py_CLEAR(asyncio_task_print_stack_func); - Py_CLEAR(asyncio_task_repr_func); - Py_CLEAR(asyncio_InvalidStateError); - Py_CLEAR(asyncio_CancelledError); + asyncio_state *state = get_asyncio_state(mod); + + Py_CLEAR(state->FutureIterType); + Py_CLEAR(state->TaskStepMethWrapper_Type); + Py_CLEAR(state->FutureType); + Py_CLEAR(state->TaskType); + Py_CLEAR(state->PyRunningLoopHolder_Type); - Py_CLEAR(all_tasks); - Py_CLEAR(current_tasks); - Py_CLEAR(iscoroutine_typecache); + Py_CLEAR(state->asyncio_mod); + Py_CLEAR(state->traceback_extract_stack); + Py_CLEAR(state->asyncio_future_repr_func); + Py_CLEAR(state->asyncio_get_event_loop_policy); + Py_CLEAR(state->asyncio_iscoroutine_func); + Py_CLEAR(state->asyncio_task_get_stack_func); + Py_CLEAR(state->asyncio_task_print_stack_func); + Py_CLEAR(state->asyncio_task_repr_func); + Py_CLEAR(state->asyncio_InvalidStateError); + Py_CLEAR(state->asyncio_CancelledError); - Py_CLEAR(context_kwname); + Py_CLEAR(state->all_tasks); + Py_CLEAR(state->current_tasks); + Py_CLEAR(state->iscoroutine_typecache); + + Py_CLEAR(state->context_kwname); module_free_freelists(); - module_initialized = 0; + return 0; +} + +static void +module_free(void *mod) +{ + (void)module_clear((PyObject *)mod); } static int -module_init(void) +module_init(asyncio_state *state) { PyObject *module = NULL; - if (module_initialized) { - return 0; - } - asyncio_mod = PyImport_ImportModule("asyncio"); - if (asyncio_mod == NULL) { + state->asyncio_mod = PyImport_ImportModule("asyncio"); + if (state->asyncio_mod == NULL) { goto fail; } - current_tasks = PyDict_New(); - if (current_tasks == NULL) { + state->current_tasks = PyDict_New(); + if (state->current_tasks == NULL) { goto fail; } - iscoroutine_typecache = PySet_New(NULL); - if (iscoroutine_typecache == NULL) { + state->iscoroutine_typecache = PySet_New(NULL); + if (state->iscoroutine_typecache == NULL) { goto fail; } - context_kwname = Py_BuildValue("(s)", "context"); - if (context_kwname == NULL) { + state->context_kwname = Py_BuildValue("(s)", "context"); + if (state->context_kwname == 
NULL) { goto fail; } @@ -3408,42 +3556,40 @@ module_init(void) } WITH_MOD("asyncio.events") - GET_MOD_ATTR(asyncio_get_event_loop_policy, "get_event_loop_policy") + GET_MOD_ATTR(state->asyncio_get_event_loop_policy, "get_event_loop_policy") WITH_MOD("asyncio.base_futures") - GET_MOD_ATTR(asyncio_future_repr_func, "_future_repr") + GET_MOD_ATTR(state->asyncio_future_repr_func, "_future_repr") WITH_MOD("asyncio.exceptions") - GET_MOD_ATTR(asyncio_InvalidStateError, "InvalidStateError") - GET_MOD_ATTR(asyncio_CancelledError, "CancelledError") + GET_MOD_ATTR(state->asyncio_InvalidStateError, "InvalidStateError") + GET_MOD_ATTR(state->asyncio_CancelledError, "CancelledError") WITH_MOD("asyncio.base_tasks") - GET_MOD_ATTR(asyncio_task_repr_func, "_task_repr") - GET_MOD_ATTR(asyncio_task_get_stack_func, "_task_get_stack") - GET_MOD_ATTR(asyncio_task_print_stack_func, "_task_print_stack") + GET_MOD_ATTR(state->asyncio_task_repr_func, "_task_repr") + GET_MOD_ATTR(state->asyncio_task_get_stack_func, "_task_get_stack") + GET_MOD_ATTR(state->asyncio_task_print_stack_func, "_task_print_stack") WITH_MOD("asyncio.coroutines") - GET_MOD_ATTR(asyncio_iscoroutine_func, "iscoroutine") + GET_MOD_ATTR(state->asyncio_iscoroutine_func, "iscoroutine") WITH_MOD("traceback") - GET_MOD_ATTR(traceback_extract_stack, "extract_stack") + GET_MOD_ATTR(state->traceback_extract_stack, "extract_stack") PyObject *weak_set; WITH_MOD("weakref") GET_MOD_ATTR(weak_set, "WeakSet"); - all_tasks = PyObject_CallNoArgs(weak_set); + state->all_tasks = PyObject_CallNoArgs(weak_set); Py_CLEAR(weak_set); - if (all_tasks == NULL) { + if (state->all_tasks == NULL) { goto fail; } - module_initialized = 1; Py_DECREF(module); return 0; fail: Py_CLEAR(module); - module_free(NULL); return -1; #undef WITH_MOD @@ -3454,7 +3600,6 @@ PyDoc_STRVAR(module_doc, "Accelerator module for asyncio"); static PyMethodDef asyncio_methods[] = { _ASYNCIO_GET_EVENT_LOOP_METHODDEF - _ASYNCIO__GET_EVENT_LOOP_METHODDEF _ASYNCIO_GET_RUNNING_LOOP_METHODDEF _ASYNCIO__GET_RUNNING_LOOP_METHODDEF _ASYNCIO__SET_RUNNING_LOOP_METHODDEF @@ -3465,64 +3610,71 @@ static PyMethodDef asyncio_methods[] = { {NULL, NULL} }; -static struct PyModuleDef _asynciomodule = { - PyModuleDef_HEAD_INIT, /* m_base */ - "_asyncio", /* m_name */ - module_doc, /* m_doc */ - -1, /* m_size */ - asyncio_methods, /* m_methods */ - NULL, /* m_slots */ - NULL, /* m_traverse */ - NULL, /* m_clear */ - (freefunc)module_free /* m_free */ -}; +static int +module_exec(PyObject *mod) +{ + asyncio_state *state = get_asyncio_state(mod); +#define CREATE_TYPE(m, tp, spec, base) \ + do { \ + tp = (PyTypeObject *)PyType_FromMetaclass(NULL, m, spec, \ + (PyObject *)base); \ + if (tp == NULL) { \ + return -1; \ + } \ + } while (0) -PyMODINIT_FUNC -PyInit__asyncio(void) -{ - if (module_init() < 0) { - return NULL; - } - if (PyType_Ready(&FutureIterType) < 0) { - return NULL; + CREATE_TYPE(mod, state->TaskStepMethWrapper_Type, &TaskStepMethWrapper_spec, NULL); + CREATE_TYPE(mod, state->PyRunningLoopHolder_Type, &PyRunningLoopHolder_spec, NULL); + CREATE_TYPE(mod, state->FutureIterType, &FutureIter_spec, NULL); + CREATE_TYPE(mod, state->FutureType, &Future_spec, NULL); + CREATE_TYPE(mod, state->TaskType, &Task_spec, state->FutureType); + +#undef CREATE_TYPE + + if (PyModule_AddType(mod, state->FutureType) < 0) { + return -1; } - if (PyType_Ready(&TaskStepMethWrapper_Type) < 0) { - return NULL; + + if (PyModule_AddType(mod, state->TaskType) < 0) { + return -1; } - if (PyType_Ready(&PyRunningLoopHolder_Type) < 0) { - 
return NULL; + // Must be done after types are added to avoid a circular dependency + if (module_init(state) < 0) { + return -1; } - PyObject *m = PyModule_Create(&_asynciomodule); - if (m == NULL) { - return NULL; + if (PyModule_AddObjectRef(mod, "_all_tasks", state->all_tasks) < 0) { + return -1; } - /* FutureType and TaskType are made ready by PyModule_AddType() calls below. */ - if (PyModule_AddType(m, &FutureType) < 0) { - Py_DECREF(m); - return NULL; + if (PyModule_AddObjectRef(mod, "_current_tasks", state->current_tasks) < 0) { + return -1; } - if (PyModule_AddType(m, &TaskType) < 0) { - Py_DECREF(m); - return NULL; - } - Py_INCREF(all_tasks); - if (PyModule_AddObject(m, "_all_tasks", all_tasks) < 0) { - Py_DECREF(all_tasks); - Py_DECREF(m); - return NULL; - } + return 0; +} - Py_INCREF(current_tasks); - if (PyModule_AddObject(m, "_current_tasks", current_tasks) < 0) { - Py_DECREF(current_tasks); - Py_DECREF(m); - return NULL; - } +static struct PyModuleDef_Slot module_slots[] = { + {Py_mod_exec, module_exec}, + {0, NULL}, +}; - return m; +static struct PyModuleDef _asynciomodule = { + .m_base = PyModuleDef_HEAD_INIT, + .m_name = "_asyncio", + .m_doc = module_doc, + .m_size = sizeof(asyncio_state), + .m_methods = asyncio_methods, + .m_slots = module_slots, + .m_traverse = module_traverse, + .m_clear = module_clear, + .m_free = (freefunc)module_free, +}; + +PyMODINIT_FUNC +PyInit__asyncio(void) +{ + return PyModuleDef_Init(&_asynciomodule); } diff --git a/Modules/_codecsmodule.c b/Modules/_codecsmodule.c index 8a0df4266e8354..d5035d20600ae2 100644 --- a/Modules/_codecsmodule.c +++ b/Modules/_codecsmodule.c @@ -256,14 +256,14 @@ _codecs_escape_encode_impl(PyObject *module, PyObject *data, _codecs.utf_7_decode data: Py_buffer errors: str(accept={str, NoneType}) = None - final: bool(accept={int}) = False + final: bool = False / [clinic start generated code]*/ static PyObject * _codecs_utf_7_decode_impl(PyObject *module, Py_buffer *data, const char *errors, int final) -/*[clinic end generated code: output=0cd3a944a32a4089 input=22c395d357815d26]*/ +/*[clinic end generated code: output=0cd3a944a32a4089 input=dbf8c8998102dc7d]*/ { Py_ssize_t consumed = data->len; PyObject *decoded = PyUnicode_DecodeUTF7Stateful(data->buf, data->len, @@ -276,14 +276,14 @@ _codecs_utf_7_decode_impl(PyObject *module, Py_buffer *data, _codecs.utf_8_decode data: Py_buffer errors: str(accept={str, NoneType}) = None - final: bool(accept={int}) = False + final: bool = False / [clinic start generated code]*/ static PyObject * _codecs_utf_8_decode_impl(PyObject *module, Py_buffer *data, const char *errors, int final) -/*[clinic end generated code: output=10f74dec8d9bb8bf input=f611b3867352ba59]*/ +/*[clinic end generated code: output=10f74dec8d9bb8bf input=ca06bc8a9c970e25]*/ { Py_ssize_t consumed = data->len; PyObject *decoded = PyUnicode_DecodeUTF8Stateful(data->buf, data->len, @@ -296,14 +296,14 @@ _codecs_utf_8_decode_impl(PyObject *module, Py_buffer *data, _codecs.utf_16_decode data: Py_buffer errors: str(accept={str, NoneType}) = None - final: bool(accept={int}) = False + final: bool = False / [clinic start generated code]*/ static PyObject * _codecs_utf_16_decode_impl(PyObject *module, Py_buffer *data, const char *errors, int final) -/*[clinic end generated code: output=783b442abcbcc2d0 input=191d360bd7309180]*/ +/*[clinic end generated code: output=783b442abcbcc2d0 input=5b0f52071ba6cadc]*/ { int byteorder = 0; /* This is overwritten unless final is true. 
*/ @@ -318,14 +318,14 @@ _codecs_utf_16_decode_impl(PyObject *module, Py_buffer *data, _codecs.utf_16_le_decode data: Py_buffer errors: str(accept={str, NoneType}) = None - final: bool(accept={int}) = False + final: bool = False / [clinic start generated code]*/ static PyObject * _codecs_utf_16_le_decode_impl(PyObject *module, Py_buffer *data, const char *errors, int final) -/*[clinic end generated code: output=899b9e6364379dcd input=c6904fdc27fb4724]*/ +/*[clinic end generated code: output=899b9e6364379dcd input=115bd8c7b783d0bf]*/ { int byteorder = -1; /* This is overwritten unless final is true. */ @@ -340,14 +340,14 @@ _codecs_utf_16_le_decode_impl(PyObject *module, Py_buffer *data, _codecs.utf_16_be_decode data: Py_buffer errors: str(accept={str, NoneType}) = None - final: bool(accept={int}) = False + final: bool = False / [clinic start generated code]*/ static PyObject * _codecs_utf_16_be_decode_impl(PyObject *module, Py_buffer *data, const char *errors, int final) -/*[clinic end generated code: output=49f6465ea07669c8 input=e49012400974649b]*/ +/*[clinic end generated code: output=49f6465ea07669c8 input=63131422b01f9cb4]*/ { int byteorder = 1; /* This is overwritten unless final is true. */ @@ -370,14 +370,14 @@ _codecs.utf_16_ex_decode data: Py_buffer errors: str(accept={str, NoneType}) = None byteorder: int = 0 - final: bool(accept={int}) = False + final: bool = False / [clinic start generated code]*/ static PyObject * _codecs_utf_16_ex_decode_impl(PyObject *module, Py_buffer *data, const char *errors, int byteorder, int final) -/*[clinic end generated code: output=0f385f251ecc1988 input=5a9c19f2e6b6cf0e]*/ +/*[clinic end generated code: output=0f385f251ecc1988 input=f368a51cf384bf4c]*/ { /* This is overwritten unless final is true. */ Py_ssize_t consumed = data->len; @@ -394,14 +394,14 @@ _codecs_utf_16_ex_decode_impl(PyObject *module, Py_buffer *data, _codecs.utf_32_decode data: Py_buffer errors: str(accept={str, NoneType}) = None - final: bool(accept={int}) = False + final: bool = False / [clinic start generated code]*/ static PyObject * _codecs_utf_32_decode_impl(PyObject *module, Py_buffer *data, const char *errors, int final) -/*[clinic end generated code: output=2fc961807f7b145f input=fd7193965627eb58]*/ +/*[clinic end generated code: output=2fc961807f7b145f input=fcdf3658c5e9b5f3]*/ { int byteorder = 0; /* This is overwritten unless final is true. */ @@ -416,14 +416,14 @@ _codecs_utf_32_decode_impl(PyObject *module, Py_buffer *data, _codecs.utf_32_le_decode data: Py_buffer errors: str(accept={str, NoneType}) = None - final: bool(accept={int}) = False + final: bool = False / [clinic start generated code]*/ static PyObject * _codecs_utf_32_le_decode_impl(PyObject *module, Py_buffer *data, const char *errors, int final) -/*[clinic end generated code: output=ec8f46b67a94f3e6 input=9078ec70acfe7613]*/ +/*[clinic end generated code: output=ec8f46b67a94f3e6 input=12220556e885f817]*/ { int byteorder = -1; /* This is overwritten unless final is true. 
*/ @@ -438,14 +438,14 @@ _codecs_utf_32_le_decode_impl(PyObject *module, Py_buffer *data, _codecs.utf_32_be_decode data: Py_buffer errors: str(accept={str, NoneType}) = None - final: bool(accept={int}) = False + final: bool = False / [clinic start generated code]*/ static PyObject * _codecs_utf_32_be_decode_impl(PyObject *module, Py_buffer *data, const char *errors, int final) -/*[clinic end generated code: output=ff82bae862c92c4e input=f1ae1bbbb86648ff]*/ +/*[clinic end generated code: output=ff82bae862c92c4e input=2bc669b4781598db]*/ { int byteorder = 1; /* This is overwritten unless final is true. */ @@ -468,14 +468,14 @@ _codecs.utf_32_ex_decode data: Py_buffer errors: str(accept={str, NoneType}) = None byteorder: int = 0 - final: bool(accept={int}) = False + final: bool = False / [clinic start generated code]*/ static PyObject * _codecs_utf_32_ex_decode_impl(PyObject *module, Py_buffer *data, const char *errors, int byteorder, int final) -/*[clinic end generated code: output=6bfb177dceaf4848 input=e46a73bc859d0bd0]*/ +/*[clinic end generated code: output=6bfb177dceaf4848 input=4a2323d0013620df]*/ { Py_ssize_t consumed = data->len; PyObject *decoded = PyUnicode_DecodeUTF32Stateful(data->buf, data->len, @@ -490,14 +490,14 @@ _codecs_utf_32_ex_decode_impl(PyObject *module, Py_buffer *data, _codecs.unicode_escape_decode data: Py_buffer(accept={str, buffer}) errors: str(accept={str, NoneType}) = None - final: bool(accept={int}) = True + final: bool = True / [clinic start generated code]*/ static PyObject * _codecs_unicode_escape_decode_impl(PyObject *module, Py_buffer *data, const char *errors, int final) -/*[clinic end generated code: output=b284f97b12c635ee input=6154f039a9f7c639]*/ +/*[clinic end generated code: output=b284f97b12c635ee input=15019f081ffe272b]*/ { Py_ssize_t consumed = data->len; PyObject *decoded = _PyUnicode_DecodeUnicodeEscapeStateful(data->buf, data->len, @@ -510,14 +510,14 @@ _codecs_unicode_escape_decode_impl(PyObject *module, Py_buffer *data, _codecs.raw_unicode_escape_decode data: Py_buffer(accept={str, buffer}) errors: str(accept={str, NoneType}) = None - final: bool(accept={int}) = True + final: bool = True / [clinic start generated code]*/ static PyObject * _codecs_raw_unicode_escape_decode_impl(PyObject *module, Py_buffer *data, const char *errors, int final) -/*[clinic end generated code: output=11dbd96301e2879e input=2d166191beb3235a]*/ +/*[clinic end generated code: output=11dbd96301e2879e input=b93f823aa8c343ad]*/ { Py_ssize_t consumed = data->len; PyObject *decoded = _PyUnicode_DecodeRawUnicodeEscapeStateful(data->buf, data->len, @@ -586,14 +586,14 @@ _codecs_charmap_decode_impl(PyObject *module, Py_buffer *data, _codecs.mbcs_decode data: Py_buffer errors: str(accept={str, NoneType}) = None - final: bool(accept={int}) = False + final: bool = False / [clinic start generated code]*/ static PyObject * _codecs_mbcs_decode_impl(PyObject *module, Py_buffer *data, const char *errors, int final) -/*[clinic end generated code: output=39b65b8598938c4b input=1c1d50f08fa53789]*/ +/*[clinic end generated code: output=39b65b8598938c4b input=f144ad1ed6d8f5a6]*/ { Py_ssize_t consumed = data->len; PyObject *decoded = PyUnicode_DecodeMBCSStateful(data->buf, data->len, @@ -605,14 +605,14 @@ _codecs_mbcs_decode_impl(PyObject *module, Py_buffer *data, _codecs.oem_decode data: Py_buffer errors: str(accept={str, NoneType}) = None - final: bool(accept={int}) = False + final: bool = False / [clinic start generated code]*/ static PyObject * _codecs_oem_decode_impl(PyObject 
*module, Py_buffer *data, const char *errors, int final) -/*[clinic end generated code: output=da1617612f3fcad8 input=81b67cba811022e5]*/ +/*[clinic end generated code: output=da1617612f3fcad8 input=629bf87376d211b4]*/ { Py_ssize_t consumed = data->len; PyObject *decoded = PyUnicode_DecodeCodePageStateful(CP_OEMCP, @@ -625,14 +625,14 @@ _codecs.code_page_decode codepage: int data: Py_buffer errors: str(accept={str, NoneType}) = None - final: bool(accept={int}) = False + final: bool = False / [clinic start generated code]*/ static PyObject * _codecs_code_page_decode_impl(PyObject *module, int codepage, Py_buffer *data, const char *errors, int final) -/*[clinic end generated code: output=53008ea967da3fff input=c5f58d036cb63575]*/ +/*[clinic end generated code: output=53008ea967da3fff input=6a32589b0658c277]*/ { Py_ssize_t consumed = data->len; PyObject *decoded = PyUnicode_DecodeCodePageStateful(codepage, diff --git a/Modules/_collectionsmodule.c b/Modules/_collectionsmodule.c index 741cfbe9dc6142..68131f3b54d2ea 100644 --- a/Modules/_collectionsmodule.c +++ b/Modules/_collectionsmodule.c @@ -299,8 +299,7 @@ deque_append_internal(dequeobject *deque, PyObject *item, Py_ssize_t maxlen) static PyObject * deque_append(dequeobject *deque, PyObject *item) { - Py_INCREF(item); - if (deque_append_internal(deque, item, deque->maxlen) < 0) + if (deque_append_internal(deque, Py_NewRef(item), deque->maxlen) < 0) return NULL; Py_RETURN_NONE; } @@ -336,8 +335,7 @@ deque_appendleft_internal(dequeobject *deque, PyObject *item, Py_ssize_t maxlen) static PyObject * deque_appendleft(dequeobject *deque, PyObject *item) { - Py_INCREF(item); - if (deque_appendleft_internal(deque, item, deque->maxlen) < 0) + if (deque_appendleft_internal(deque, Py_NewRef(item), deque->maxlen) < 0) return NULL; Py_RETURN_NONE; } @@ -655,14 +653,12 @@ deque_inplace_repeat(dequeobject *deque, Py_ssize_t n) size = Py_SIZE(deque); if (size == 0 || n == 1) { - Py_INCREF(deque); - return (PyObject *)deque; + return Py_NewRef(deque); } if (n <= 0) { deque_clear(deque); - Py_INCREF(deque); - return (PyObject *)deque; + return Py_NewRef(deque); } if (size == 1) { @@ -693,13 +689,11 @@ deque_inplace_repeat(dequeobject *deque, Py_ssize_t n) i += m; while (m--) { deque->rightindex++; - Py_INCREF(item); - deque->rightblock->data[deque->rightindex] = item; + deque->rightblock->data[deque->rightindex] = Py_NewRef(item); } } Py_SET_SIZE(deque, Py_SIZE(deque) + i); - Py_INCREF(deque); - return (PyObject *)deque; + return Py_NewRef(deque); } if ((size_t)size > PY_SSIZE_T_MAX / (size_t)n) { @@ -972,8 +966,7 @@ deque_count(dequeobject *deque, PyObject *v) while (--n >= 0) { CHECK_NOT_END(b); - item = b->data[index]; - Py_INCREF(item); + item = Py_NewRef(b->data[index]); cmp = PyObject_RichCompareBool(item, v, Py_EQ); Py_DECREF(item); if (cmp < 0) @@ -1011,8 +1004,7 @@ deque_contains(dequeobject *deque, PyObject *v) while (--n >= 0) { CHECK_NOT_END(b); - item = b->data[index]; - Py_INCREF(item); + item = Py_NewRef(b->data[index]); cmp = PyObject_RichCompareBool(item, v, Py_EQ); Py_DECREF(item); if (cmp) { @@ -1201,8 +1193,7 @@ deque_item(dequeobject *deque, Py_ssize_t i) } } item = b->data[i]; - Py_INCREF(item); - return item; + return Py_NewRef(item); } static int @@ -1231,8 +1222,7 @@ deque_remove(dequeobject *deque, PyObject *value) int cmp, rv; for (i = 0 ; i < n; i++) { - item = b->data[index]; - Py_INCREF(item); + item = Py_NewRef(b->data[index]); cmp = PyObject_RichCompareBool(item, value, Py_EQ); Py_DECREF(item); if (cmp < 0) { @@ -1266,7 
+1256,6 @@ deque_remove(dequeobject *deque, PyObject *value) static int deque_ass_item(dequeobject *deque, Py_ssize_t i, PyObject *v) { - PyObject *old_value; block *b; Py_ssize_t n, len=Py_SIZE(deque), halflen=(len+1)>>1, index=i; @@ -1292,10 +1281,7 @@ deque_ass_item(dequeobject *deque, Py_ssize_t i, PyObject *v) while (--n >= 0) b = b->leftlink; } - Py_INCREF(v); - old_value = b->data[i]; - b->data[i] = v; - Py_DECREF(old_value); + Py_SETREF(b->data[i], Py_NewRef(v)); return 0; } @@ -1522,15 +1508,13 @@ deque_init(dequeobject *deque, PyObject *args, PyObject *kwdargs) static PyObject * deque_sizeof(dequeobject *deque, void *unused) { - Py_ssize_t res; - Py_ssize_t blocks; - - res = _PyObject_SIZE(Py_TYPE(deque)); + size_t res = _PyObject_SIZE(Py_TYPE(deque)); + size_t blocks; blocks = (size_t)(deque->leftindex + Py_SIZE(deque) + BLOCKLEN - 1) / BLOCKLEN; - assert(deque->leftindex + Py_SIZE(deque) - 1 == - (blocks - 1) * BLOCKLEN + deque->rightindex); + assert(((size_t)deque->leftindex + (size_t)Py_SIZE(deque) - 1) == + ((blocks - 1) * BLOCKLEN + (size_t)deque->rightindex)); res += blocks * sizeof(block); - return PyLong_FromSsize_t(res); + return PyLong_FromSize_t(res); } PyDoc_STRVAR(sizeof_doc, @@ -1686,8 +1670,7 @@ deque_iter(dequeobject *deque) return NULL; it->b = deque->leftblock; it->index = deque->leftindex; - Py_INCREF(deque); - it->deque = deque; + it->deque = (dequeobject*)Py_NewRef(deque); it->state = deque->state; it->counter = Py_SIZE(deque); PyObject_GC_Track(it); @@ -1734,8 +1717,7 @@ dequeiter_next(dequeiterobject *it) it->b = it->b->rightlink; it->index = 0; } - Py_INCREF(item); - return item; + return Py_NewRef(item); } static PyObject * @@ -1844,8 +1826,7 @@ deque_reviter(dequeobject *deque, PyObject *Py_UNUSED(ignored)) return NULL; it->b = deque->rightblock; it->index = deque->rightindex; - Py_INCREF(deque); - it->deque = deque; + it->deque = (dequeobject*)Py_NewRef(deque); it->state = deque->state; it->counter = Py_SIZE(deque); PyObject_GC_Track(it); @@ -1876,8 +1857,7 @@ dequereviter_next(dequeiterobject *it) it->b = it->b->leftlink; it->index = BLOCKLEN - 1; } - Py_INCREF(item); - return item; + return Py_NewRef(item); } static PyObject * @@ -2203,8 +2183,7 @@ defdict_init(PyObject *self, PyObject *args, PyObject *kwds) } if (newargs == NULL) return -1; - Py_XINCREF(newdefault); - dd->default_factory = newdefault; + dd->default_factory = Py_XNewRef(newdefault); result = PyDict_Type.tp_init(self, newargs, kwds); Py_DECREF(newargs); Py_XDECREF(olddefault); @@ -2414,8 +2393,7 @@ tuplegetter_new_impl(PyTypeObject *type, Py_ssize_t index, PyObject *doc) return NULL; } self->index = index; - Py_INCREF(doc); - self->doc = doc; + self->doc = Py_NewRef(doc); return (PyObject *)self; } @@ -2426,13 +2404,11 @@ tuplegetter_descr_get(PyObject *self, PyObject *obj, PyObject *type) PyObject *result; if (obj == NULL) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } if (!PyTuple_Check(obj)) { if (obj == Py_None) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } PyErr_Format(PyExc_TypeError, "descriptor for index '%zd' for tuple subclasses " @@ -2448,8 +2424,7 @@ tuplegetter_descr_get(PyObject *self, PyObject *obj, PyObject *type) } result = PyTuple_GET_ITEM(obj, index); - Py_INCREF(result); - return result; + return Py_NewRef(result); } static int diff --git a/Modules/_csv.c b/Modules/_csv.c index 25bf86f78e8fb9..bd337084dbff81 100644 --- a/Modules/_csv.c +++ b/Modules/_csv.c @@ -176,8 +176,7 @@ get_char_or_None(Py_UCS4 c) static PyObject * 
Dialect_get_lineterminator(DialectObj *self, void *Py_UNUSED(ignored)) { - Py_XINCREF(self->lineterminator); - return self->lineterminator; + return Py_XNewRef(self->lineterminator); } static PyObject * @@ -316,8 +315,7 @@ _set_str(const char *name, PyObject **target, PyObject *src, const char *dflt) else { if (PyUnicode_READY(src) == -1) return -1; - Py_INCREF(src); - Py_XSETREF(*target, src); + Py_XSETREF(*target, Py_NewRef(src)); } } return 0; @@ -514,8 +512,7 @@ dialect_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) goto err; } - ret = (PyObject *)self; - Py_INCREF(self); + ret = Py_NewRef(self); err: Py_CLEAR(self); Py_CLEAR(dialect); diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c index a3c7c8c471e584..b9092d3981f364 100644 --- a/Modules/_ctypes/_ctypes.c +++ b/Modules/_ctypes/_ctypes.c @@ -101,8 +101,6 @@ bytes(cdata) #ifndef Py_BUILD_CORE_BUILTIN # define Py_BUILD_CORE_MODULE 1 #endif -#define NEEDS_PY_IDENTIFIER - #define PY_SSIZE_T_CLEAN #include "Python.h" @@ -233,10 +231,8 @@ PyDict_SetItemProxy(PyObject *dict, PyObject *key, PyObject *item) remover = (DictRemoverObject *)obj; assert(remover->key == NULL); assert(remover->dict == NULL); - Py_INCREF(key); - remover->key = key; - Py_INCREF(dict); - remover->dict = dict; + remover->key = Py_NewRef(key); + remover->dict = Py_NewRef(dict); proxy = PyWeakref_NewProxy(item, obj); Py_DECREF(obj); @@ -472,8 +468,7 @@ StructUnionType_paramfunc(CDataObject *self) struct_param->ptr = ptr; } else { ptr = self->b_ptr; - obj = (PyObject *)self; - Py_INCREF(obj); + obj = Py_NewRef(self); } parg = PyCArgObject_new(); @@ -498,8 +493,6 @@ StructUnionType_new(PyTypeObject *type, PyObject *args, PyObject *kwds, int isSt PyTypeObject *result; PyObject *fields; StgDictObject *dict; - _Py_IDENTIFIER(_abstract_); - _Py_IDENTIFIER(_fields_); /* create the new instance (which is a class, since we are a metatype!) */ @@ -508,7 +501,7 @@ StructUnionType_new(PyTypeObject *type, PyObject *args, PyObject *kwds, int isSt return NULL; /* keep this for bw compatibility */ - int r = _PyDict_ContainsId(result->tp_dict, &PyId__abstract_); + int r = PyDict_Contains(result->tp_dict, &_Py_ID(_abstract_)); if (r > 0) return (PyObject *)result; if (r < 0) { @@ -540,9 +533,9 @@ StructUnionType_new(PyTypeObject *type, PyObject *args, PyObject *kwds, int isSt dict->paramfunc = StructUnionType_paramfunc; - fields = _PyDict_GetItemIdWithError((PyObject *)dict, &PyId__fields_); + fields = PyDict_GetItemWithError((PyObject *)dict, &_Py_ID(_fields_)); if (fields) { - if (_PyObject_SetAttrId((PyObject *)result, &PyId__fields_, fields) < 0) { + if (PyObject_SetAttr((PyObject *)result, &_Py_ID(_fields_), fields) < 0) { Py_DECREF(result); return NULL; } @@ -797,14 +790,12 @@ PyDoc_STRVAR(from_param_doc, static PyObject * CDataType_from_param(PyObject *type, PyObject *value) { - _Py_IDENTIFIER(_as_parameter_); PyObject *as_parameter; int res = PyObject_IsInstance(value, type); if (res == -1) return NULL; if (res) { - Py_INCREF(value); - return value; + return Py_NewRef(value); } if (PyCArg_CheckExact(value)) { PyCArgObject *p = (PyCArgObject *)value; @@ -820,8 +811,7 @@ CDataType_from_param(PyObject *type, PyObject *value) if (res == -1) return NULL; if (res) { - Py_INCREF(value); - return value; + return Py_NewRef(value); } } ob_name = (ob) ? 
Py_TYPE(ob)->tp_name : "???"; @@ -831,7 +821,7 @@ CDataType_from_param(PyObject *type, PyObject *value) return NULL; } - if (_PyObject_LookupAttrId(value, &PyId__as_parameter_, &as_parameter) < 0) { + if (_PyObject_LookupAttr(value, &_Py_ID(_as_parameter_), &as_parameter) < 0) { return NULL; } if (as_parameter) { @@ -1055,8 +1045,7 @@ PyCPointerType_paramfunc(CDataObject *self) parg->tag = 'P'; parg->pffi_type = &ffi_type_pointer; - Py_INCREF(self); - parg->obj = (PyObject *)self; + parg->obj = Py_NewRef(self); parg->value.p = *(void **)self->b_ptr; return parg; } @@ -1068,7 +1057,7 @@ PyCPointerType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) StgDictObject *stgdict; PyObject *proto; PyObject *typedict; - _Py_IDENTIFIER(_type_); + typedict = PyTuple_GetItem(args, 2); if (!typedict) @@ -1088,7 +1077,7 @@ PyCPointerType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) stgdict->paramfunc = PyCPointerType_paramfunc; stgdict->flags |= TYPEFLAG_ISPOINTER; - proto = _PyDict_GetItemIdWithError(typedict, &PyId__type_); /* Borrowed ref */ + proto = PyDict_GetItemWithError(typedict, &_Py_ID(_type_)); /* Borrowed ref */ if (proto) { StgDictObject *itemdict; const char *current_format; @@ -1146,7 +1135,7 @@ static PyObject * PyCPointerType_set_type(PyTypeObject *self, PyObject *type) { StgDictObject *dict; - _Py_IDENTIFIER(_type_); + dict = PyType_stgdict((PyObject *)self); if (!dict) { @@ -1158,7 +1147,7 @@ PyCPointerType_set_type(PyTypeObject *self, PyObject *type) if (-1 == PyCPointerType_SetProto(dict, type)) return NULL; - if (-1 == _PyDict_SetItemId((PyObject *)dict, &PyId__type_, type)) + if (-1 == PyDict_SetItem((PyObject *)dict, &_Py_ID(_type_), type)) return NULL; Py_RETURN_NONE; @@ -1173,8 +1162,7 @@ PyCPointerType_from_param(PyObject *type, PyObject *value) if (value == Py_None) { /* ConvParam will convert to a NULL pointer later */ - Py_INCREF(value); - return value; + return Py_NewRef(value); } typedict = PyType_stgdict(type); @@ -1208,8 +1196,7 @@ PyCPointerType_from_param(PyObject *type, PyObject *value) return NULL; } if (ret) { - Py_INCREF(value); - return value; + return Py_NewRef(value); } } return CDataType_from_param(type, value); @@ -1453,16 +1440,13 @@ PyCArrayType_paramfunc(CDataObject *self) p->tag = 'P'; p->pffi_type = &ffi_type_pointer; p->value.p = (char *)self->b_ptr; - Py_INCREF(self); - p->obj = (PyObject *)self; + p->obj = Py_NewRef(self); return p; } static PyObject * PyCArrayType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { - _Py_IDENTIFIER(_length_); - _Py_IDENTIFIER(_type_); PyTypeObject *result; StgDictObject *stgdict; StgDictObject *itemdict; @@ -1481,7 +1465,7 @@ PyCArrayType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) stgdict = NULL; type_attr = NULL; - if (_PyObject_LookupAttrId((PyObject *)result, &PyId__length_, &length_attr) < 0) { + if (_PyObject_LookupAttr((PyObject *)result, &_Py_ID(_length_), &length_attr) < 0) { goto error; } if (!length_attr) { @@ -1514,7 +1498,7 @@ PyCArrayType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) goto error; } - if (_PyObject_LookupAttrId((PyObject *)result, &PyId__type_, &type_attr) < 0) { + if (_PyObject_LookupAttr((PyObject *)result, &_Py_ID(_type_), &type_attr) < 0) { goto error; } if (!type_attr) { @@ -1659,7 +1643,6 @@ static const char SIMPLE_TYPE_CHARS[] = "cbBhHiIlLdfuzZqQPXOv?g"; static PyObject * c_wchar_p_from_param(PyObject *type, PyObject *value) { - _Py_IDENTIFIER(_as_parameter_); PyObject *as_parameter; int res; if (value == Py_None) { @@ -1685,8 +1668,7 
@@ c_wchar_p_from_param(PyObject *type, PyObject *value) if (res == -1) return NULL; if (res) { - Py_INCREF(value); - return value; + return Py_NewRef(value); } if (ArrayObject_Check(value) || PointerObject_Check(value)) { /* c_wchar array instance or pointer(c_wchar(...)) */ @@ -1695,8 +1677,7 @@ c_wchar_p_from_param(PyObject *type, PyObject *value) assert(dt); /* Cannot be NULL for pointer or array objects */ dict = dt && dt->proto ? PyType_stgdict(dt->proto) : NULL; if (dict && (dict->setfunc == _ctypes_get_fielddesc("u")->setfunc)) { - Py_INCREF(value); - return value; + return Py_NewRef(value); } } if (PyCArg_CheckExact(value)) { @@ -1704,12 +1685,11 @@ c_wchar_p_from_param(PyObject *type, PyObject *value) PyCArgObject *a = (PyCArgObject *)value; StgDictObject *dict = PyObject_stgdict(a->obj); if (dict && (dict->setfunc == _ctypes_get_fielddesc("u")->setfunc)) { - Py_INCREF(value); - return value; + return Py_NewRef(value); } } - if (_PyObject_LookupAttrId(value, &PyId__as_parameter_, &as_parameter) < 0) { + if (_PyObject_LookupAttr(value, &_Py_ID(_as_parameter_), &as_parameter) < 0) { return NULL; } if (as_parameter) { @@ -1726,7 +1706,6 @@ c_wchar_p_from_param(PyObject *type, PyObject *value) static PyObject * c_char_p_from_param(PyObject *type, PyObject *value) { - _Py_IDENTIFIER(_as_parameter_); PyObject *as_parameter; int res; if (value == Py_None) { @@ -1752,8 +1731,7 @@ c_char_p_from_param(PyObject *type, PyObject *value) if (res == -1) return NULL; if (res) { - Py_INCREF(value); - return value; + return Py_NewRef(value); } if (ArrayObject_Check(value) || PointerObject_Check(value)) { /* c_char array instance or pointer(c_char(...)) */ @@ -1762,8 +1740,7 @@ c_char_p_from_param(PyObject *type, PyObject *value) assert(dt); /* Cannot be NULL for pointer or array objects */ dict = dt && dt->proto ? PyType_stgdict(dt->proto) : NULL; if (dict && (dict->setfunc == _ctypes_get_fielddesc("c")->setfunc)) { - Py_INCREF(value); - return value; + return Py_NewRef(value); } } if (PyCArg_CheckExact(value)) { @@ -1771,12 +1748,11 @@ c_char_p_from_param(PyObject *type, PyObject *value) PyCArgObject *a = (PyCArgObject *)value; StgDictObject *dict = PyObject_stgdict(a->obj); if (dict && (dict->setfunc == _ctypes_get_fielddesc("c")->setfunc)) { - Py_INCREF(value); - return value; + return Py_NewRef(value); } } - if (_PyObject_LookupAttrId(value, &PyId__as_parameter_, &as_parameter) < 0) { + if (_PyObject_LookupAttr(value, &_Py_ID(_as_parameter_), &as_parameter) < 0) { return NULL; } if (as_parameter) { @@ -1793,7 +1769,6 @@ c_char_p_from_param(PyObject *type, PyObject *value) static PyObject * c_void_p_from_param(PyObject *type, PyObject *value) { - _Py_IDENTIFIER(_as_parameter_); StgDictObject *stgd; PyObject *as_parameter; int res; @@ -1861,22 +1836,19 @@ c_void_p_from_param(PyObject *type, PyObject *value) return NULL; if (res) { /* c_void_p instances */ - Py_INCREF(value); - return value; + return Py_NewRef(value); } /* ctypes array or pointer instance */ if (ArrayObject_Check(value) || PointerObject_Check(value)) { /* Any array or pointer is accepted */ - Py_INCREF(value); - return value; + return Py_NewRef(value); } /* byref(...) 
*/ if (PyCArg_CheckExact(value)) { /* byref(c_xxx()) */ PyCArgObject *a = (PyCArgObject *)value; if (a->tag == 'P') { - Py_INCREF(value); - return value; + return Py_NewRef(value); } } /* function pointer */ @@ -1907,15 +1879,14 @@ c_void_p_from_param(PyObject *type, PyObject *value) return NULL; parg->pffi_type = &ffi_type_pointer; parg->tag = 'Z'; - Py_INCREF(value); - parg->obj = value; + parg->obj = Py_NewRef(value); /* Remember: b_ptr points to where the pointer is stored! */ parg->value.p = *(void **)(((CDataObject *)value)->b_ptr); return (PyObject *)parg; } } - if (_PyObject_LookupAttrId(value, &PyId__as_parameter_, &as_parameter) < 0) { + if (_PyObject_LookupAttr(value, &_Py_ID(_as_parameter_), &as_parameter) < 0) { return NULL; } if (as_parameter) { @@ -1993,8 +1964,7 @@ static PyObject *CreateSwappedType(PyTypeObject *type, PyObject *args, PyObject stgdict->setfunc = fmt->setfunc_swapped; stgdict->getfunc = fmt->getfunc_swapped; - Py_INCREF(proto); - stgdict->proto = proto; + stgdict->proto = Py_NewRef(proto); /* replace the class dict by our updated spam dict */ if (-1 == PyDict_Update((PyObject *)stgdict, result->tp_dict)) { @@ -2029,8 +1999,7 @@ PyCSimpleType_paramfunc(CDataObject *self) parg->tag = fmt[0]; parg->pffi_type = fd->pffi_type; - Py_INCREF(self); - parg->obj = (PyObject *)self; + parg->obj = Py_NewRef(self); memcpy(&parg->value, self->b_ptr, self->b_size); return parg; } @@ -2038,7 +2007,6 @@ PyCSimpleType_paramfunc(CDataObject *self) static PyObject * PyCSimpleType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { - _Py_IDENTIFIER(_type_); PyTypeObject *result; StgDictObject *stgdict; PyObject *proto; @@ -2053,7 +2021,7 @@ PyCSimpleType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) if (result == NULL) return NULL; - if (_PyObject_LookupAttrId((PyObject *)result, &PyId__type_, &proto) < 0) { + if (_PyObject_LookupAttr((PyObject *)result, &_Py_ID(_type_), &proto) < 0) { return NULL; } if (!proto) { @@ -2223,7 +2191,6 @@ PyCSimpleType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) static PyObject * PyCSimpleType_from_param(PyObject *type, PyObject *value) { - _Py_IDENTIFIER(_as_parameter_); StgDictObject *dict; const char *fmt; PyCArgObject *parg; @@ -2237,8 +2204,7 @@ PyCSimpleType_from_param(PyObject *type, PyObject *value) if (res == -1) return NULL; if (res) { - Py_INCREF(value); - return value; + return Py_NewRef(value); } dict = PyType_stgdict(type); @@ -2267,7 +2233,7 @@ PyCSimpleType_from_param(PyObject *type, PyObject *value) PyErr_Clear(); Py_DECREF(parg); - if (_PyObject_LookupAttrId(value, &PyId__as_parameter_, &as_parameter) < 0) { + if (_PyObject_LookupAttr(value, &_Py_ID(_as_parameter_), &as_parameter) < 0) { return NULL; } if (as_parameter) { @@ -2344,7 +2310,6 @@ PyTypeObject PyCSimpleType_Type = { static PyObject * converters_from_argtypes(PyObject *ob) { - _Py_IDENTIFIER(from_param); PyObject *converters; Py_ssize_t i; @@ -2424,7 +2389,7 @@ converters_from_argtypes(PyObject *ob) } */ - if (_PyObject_LookupAttrId(tp, &PyId_from_param, &cnv) <= 0) { + if (_PyObject_LookupAttr(tp, &_Py_ID(from_param), &cnv) <= 0) { Py_DECREF(converters); Py_DECREF(ob); if (!PyErr_Occurred()) { @@ -2445,10 +2410,6 @@ make_funcptrtype_dict(StgDictObject *stgdict) { PyObject *ob; PyObject *converters = NULL; - _Py_IDENTIFIER(_flags_); - _Py_IDENTIFIER(_argtypes_); - _Py_IDENTIFIER(_restype_); - _Py_IDENTIFIER(_check_retval_); stgdict->align = _ctypes_get_fielddesc("P")->pffi_type->alignment; stgdict->length = 1; @@ -2457,7 +2418,7 @@ 
make_funcptrtype_dict(StgDictObject *stgdict) stgdict->getfunc = NULL; stgdict->ffi_type_pointer = ffi_type_pointer; - ob = _PyDict_GetItemIdWithError((PyObject *)stgdict, &PyId__flags_); + ob = PyDict_GetItemWithError((PyObject *)stgdict, &_Py_ID(_flags_)); if (!ob || !PyLong_Check(ob)) { if (!PyErr_Occurred()) { PyErr_SetString(PyExc_TypeError, @@ -2468,29 +2429,27 @@ make_funcptrtype_dict(StgDictObject *stgdict) stgdict->flags = PyLong_AsUnsignedLongMask(ob) | TYPEFLAG_ISPOINTER; /* _argtypes_ is optional... */ - ob = _PyDict_GetItemIdWithError((PyObject *)stgdict, &PyId__argtypes_); + ob = PyDict_GetItemWithError((PyObject *)stgdict, &_Py_ID(_argtypes_)); if (ob) { converters = converters_from_argtypes(ob); if (!converters) return -1; - Py_INCREF(ob); - stgdict->argtypes = ob; + stgdict->argtypes = Py_NewRef(ob); stgdict->converters = converters; } else if (PyErr_Occurred()) { return -1; } - ob = _PyDict_GetItemIdWithError((PyObject *)stgdict, &PyId__restype_); + ob = PyDict_GetItemWithError((PyObject *)stgdict, &_Py_ID(_restype_)); if (ob) { if (ob != Py_None && !PyType_stgdict(ob) && !PyCallable_Check(ob)) { PyErr_SetString(PyExc_TypeError, "_restype_ must be a type, a callable, or None"); return -1; } - Py_INCREF(ob); - stgdict->restype = ob; - if (_PyObject_LookupAttrId(ob, &PyId__check_retval_, + stgdict->restype = Py_NewRef(ob); + if (_PyObject_LookupAttr(ob, &_Py_ID(_check_retval_), &stgdict->checker) < 0) { return -1; @@ -2507,8 +2466,7 @@ make_funcptrtype_dict(StgDictObject *stgdict) "_errcheck_ must be callable"); return -1; } - Py_INCREF(ob); - stgdict->errcheck = ob; + stgdict->errcheck = Py_NewRef(ob); } else if (PyErr_Occurred()) { return -1; @@ -2528,8 +2486,7 @@ PyCFuncPtrType_paramfunc(CDataObject *self) parg->tag = 'P'; parg->pffi_type = &ffi_type_pointer; - Py_INCREF(self); - parg->obj = (PyObject *)self; + parg->obj = Py_NewRef(self); parg->value.p = *(void **)self->b_ptr; return parg; } @@ -2641,8 +2598,7 @@ PyCData_GetContainer(CDataObject *self) if (self->b_objects == NULL) return NULL; } else { - Py_INCREF(Py_None); - self->b_objects = Py_None; + self->b_objects = Py_NewRef(Py_None); } } return self; @@ -2784,8 +2740,7 @@ static int PyCData_NewGetBuffer(PyObject *myself, Py_buffer *view, int flags) if (view == NULL) return 0; view->buf = self->b_ptr; - view->obj = myself; - Py_INCREF(myself); + view->obj = Py_NewRef(myself); view->len = self->b_size; view->readonly = 0; /* use default format character if not set */ @@ -2877,8 +2832,7 @@ PyCData_setstate(PyObject *myself, PyObject *args) static PyObject * PyCData_from_outparam(PyObject *self, PyObject *args) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyMethodDef PyCData_methods[] = { @@ -2983,8 +2937,7 @@ PyCData_FromBaseObj(PyObject *type, PyObject *base, Py_ssize_t index, char *adr) assert(CDataObject_Check(base)); cmem->b_ptr = adr; cmem->b_needsfree = 0; - Py_INCREF(base); - cmem->b_base = (CDataObject *)base; + cmem->b_base = (CDataObject *)Py_NewRef(base); cmem->b_index = index; } else { /* copy contents of adr */ if (-1 == PyCData_MallocBuffer(cmem, dict)) { @@ -3121,8 +3074,7 @@ _PyCData_set(CDataObject *dst, PyObject *type, SETFUNC setfunc, PyObject *value, if (value == NULL) return NULL; - Py_INCREF(value); - return value; + return Py_NewRef(value); } if (PyCPointerTypeObject_Check(type) @@ -3245,8 +3197,7 @@ static PyObject * PyCFuncPtr_get_errcheck(PyCFuncPtrObject *self, void *Py_UNUSED(ignored)) { if (self->errcheck) { - Py_INCREF(self->errcheck); - return self->errcheck; 
+ return Py_NewRef(self->errcheck); } Py_RETURN_NONE; } @@ -3254,7 +3205,6 @@ PyCFuncPtr_get_errcheck(PyCFuncPtrObject *self, void *Py_UNUSED(ignored)) static int PyCFuncPtr_set_restype(PyCFuncPtrObject *self, PyObject *ob, void *Py_UNUSED(ignored)) { - _Py_IDENTIFIER(_check_retval_); PyObject *checker, *oldchecker; if (ob == NULL) { oldchecker = self->checker; @@ -3268,7 +3218,7 @@ PyCFuncPtr_set_restype(PyCFuncPtrObject *self, PyObject *ob, void *Py_UNUSED(ign "restype must be a type, a callable, or None"); return -1; } - if (_PyObject_LookupAttrId(ob, &PyId__check_retval_, &checker) < 0) { + if (_PyObject_LookupAttr(ob, &_Py_ID(_check_retval_), &checker) < 0) { return -1; } oldchecker = self->checker; @@ -3284,14 +3234,12 @@ PyCFuncPtr_get_restype(PyCFuncPtrObject *self, void *Py_UNUSED(ignored)) { StgDictObject *dict; if (self->restype) { - Py_INCREF(self->restype); - return self->restype; + return Py_NewRef(self->restype); } dict = PyObject_stgdict((PyObject *)self); assert(dict); /* Cannot be NULL for PyCFuncPtrObject instances */ if (dict->restype) { - Py_INCREF(dict->restype); - return dict->restype; + return Py_NewRef(dict->restype); } else { Py_RETURN_NONE; } @@ -3321,14 +3269,12 @@ PyCFuncPtr_get_argtypes(PyCFuncPtrObject *self, void *Py_UNUSED(ignored)) { StgDictObject *dict; if (self->argtypes) { - Py_INCREF(self->argtypes); - return self->argtypes; + return Py_NewRef(self->argtypes); } dict = PyObject_stgdict((PyObject *)self); assert(dict); /* Cannot be NULL for PyCFuncPtrObject instances */ if (dict->argtypes) { - Py_INCREF(dict->argtypes); - return dict->argtypes; + return Py_NewRef(dict->argtypes); } else { Py_RETURN_NONE; } @@ -3622,8 +3568,7 @@ PyCFuncPtr_FromDll(PyTypeObject *type, PyObject *args, PyObject *kwds) return NULL; } - Py_XINCREF(paramflags); - self->paramflags = paramflags; + self->paramflags = Py_XNewRef(paramflags); *(void **)self->b_ptr = address; Py_INCREF(dll); @@ -3633,8 +3578,7 @@ PyCFuncPtr_FromDll(PyTypeObject *type, PyObject *args, PyObject *kwds) return NULL; } - Py_INCREF(self); - self->callable = (PyObject *)self; + self->callable = Py_NewRef(self); return (PyObject *)self; } @@ -3659,8 +3603,7 @@ PyCFuncPtr_FromVtblIndex(PyTypeObject *type, PyObject *args, PyObject *kwds) self = (PyCFuncPtrObject *)GenericPyCData_new(type, args, kwds); self->index = index + 0x1000; - Py_XINCREF(paramflags); - self->paramflags = paramflags; + self->paramflags = Py_XNewRef(paramflags); if (iid_len == sizeof(GUID)) self->iid = iid; return (PyObject *)self; @@ -3758,8 +3701,7 @@ PyCFuncPtr_new(PyTypeObject *type, PyObject *args, PyObject *kwds) return NULL; } - Py_INCREF(callable); - self->callable = callable; + self->callable = Py_NewRef(callable); self->thunk = thunk; *(void **)self->b_ptr = (void *)thunk->pcl_exec; @@ -3807,23 +3749,20 @@ _get_arg(int *pindex, PyObject *name, PyObject *defval, PyObject *inargs, PyObje if (*pindex < PyTuple_GET_SIZE(inargs)) { v = PyTuple_GET_ITEM(inargs, *pindex); ++*pindex; - Py_INCREF(v); - return v; + return Py_NewRef(v); } if (kwds && name) { v = PyDict_GetItemWithError(kwds, name); if (v) { ++*pindex; - Py_INCREF(v); - return v; + return Py_NewRef(v); } else if (PyErr_Occurred()) { return NULL; } } if (defval) { - Py_INCREF(defval); - return defval; + return Py_NewRef(defval); } /* we can't currently emit a better error message */ if (name) @@ -3878,8 +3817,7 @@ _build_callargs(PyCFuncPtrObject *self, PyObject *argtypes, if (self->index) return PyTuple_GetSlice(inargs, 1, PyTuple_GET_SIZE(inargs)); #endif - 
Py_INCREF(inargs); - return inargs; + return Py_NewRef(inargs); } len = PyTuple_GET_SIZE(argtypes); @@ -4062,10 +4000,9 @@ _build_result(PyObject *result, PyObject *callargs, PyTuple_SET_ITEM(tup, index, v); index++; } else if (bit & outmask) { - _Py_IDENTIFIER(__ctypes_from_outparam__); v = PyTuple_GET_ITEM(callargs, i); - v = _PyObject_CallMethodIdNoArgs(v, &PyId___ctypes_from_outparam__); + v = PyObject_CallMethodNoArgs(v, &_Py_ID(__ctypes_from_outparam__)); if (v == NULL || numretvals == 1) { Py_DECREF(callargs); return v; @@ -4348,7 +4285,6 @@ _init_pos_args(PyObject *self, PyTypeObject *type, StgDictObject *dict; PyObject *fields; Py_ssize_t i; - _Py_IDENTIFIER(_fields_); if (PyType_stgdict((PyObject *)type->tp_base)) { index = _init_pos_args(self, type->tp_base, @@ -4359,7 +4295,7 @@ _init_pos_args(PyObject *self, PyTypeObject *type, } dict = PyType_stgdict((PyObject *)type); - fields = _PyDict_GetItemIdWithError((PyObject *)dict, &PyId__fields_); + fields = PyDict_GetItemWithError((PyObject *)dict, &_Py_ID(_fields_)); if (fields == NULL) { if (PyErr_Occurred()) { return -1; @@ -4968,8 +4904,7 @@ static PyObject * Simple_from_outparm(PyObject *self, PyObject *args) { if (_ctypes_simple_instance((PyObject *)Py_TYPE(self))) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } /* call stgdict->getfunc */ return Simple_get_value((CDataObject *)self, NULL); @@ -5603,8 +5538,7 @@ cast(void *ptr, PyObject *src, PyObject *ctype) if (obj->b_objects == NULL) goto failed; } - Py_XINCREF(obj->b_objects); - result->b_objects = obj->b_objects; + result->b_objects = Py_XNewRef(obj->b_objects); if (result->b_objects && PyDict_CheckExact(result->b_objects)) { PyObject *index; int rc; diff --git a/Modules/_ctypes/callbacks.c b/Modules/_ctypes/callbacks.c index 2a668c0ca0cc97..459632b90907fd 100644 --- a/Modules/_ctypes/callbacks.c +++ b/Modules/_ctypes/callbacks.c @@ -1,7 +1,6 @@ #ifndef Py_BUILD_CORE_BUILTIN # define Py_BUILD_CORE_MODULE 1 #endif -#define NEEDS_PY_IDENTIFIER #include "Python.h" // windows.h must be included before pycore internal headers @@ -9,7 +8,9 @@ # include <windows.h> #endif -#include "pycore_call.h" // _PyObject_CallNoArgs() +#include "pycore_call.h" // _PyObject_CallNoArgs() +#include "pycore_runtime.h" // _PyRuntime +#include "pycore_global_objects.h" // _Py_ID() #include <stdbool.h> @@ -125,9 +126,7 @@ static void TryAddRef(StgDictObject *dict, CDataObject *obj) { IUnknown *punk; - _Py_IDENTIFIER(_needs_com_addref_); - - int r = _PyDict_ContainsId((PyObject *)dict, &PyId__needs_com_addref_); + int r = PyDict_Contains((PyObject *)dict, &_Py_ID(_needs_com_addref_)); if (r <= 0) { if (r < 0) { PrintError("getting _needs_com_addref_"); @@ -375,8 +374,7 @@ CThunkObject *_ctypes_alloc_callback(PyObject *callable, } p->atypes[i] = NULL; - Py_INCREF(restype); - p->restype = restype; + p->restype = Py_NewRef(restype); if (restype == Py_None) { p->setfunc = NULL; p->ffi_restype = &ffi_type_void; @@ -405,9 +403,15 @@ CThunkObject *_ctypes_alloc_callback(PyObject *callable, "ffi_prep_cif failed with %d", result); goto error; } + + #if HAVE_FFI_PREP_CLOSURE_LOC # ifdef USING_APPLE_OS_LIBFFI +# ifdef HAVE_BUILTIN_AVAILABLE # define HAVE_FFI_PREP_CLOSURE_LOC_RUNTIME __builtin_available(macos 10.15, ios 13, watchos 6, tvos 13, *) +# else +# define HAVE_FFI_PREP_CLOSURE_LOC_RUNTIME (ffi_prep_closure_loc != NULL) +# endif # else # define HAVE_FFI_PREP_CLOSURE_LOC_RUNTIME 1 # endif @@ -447,10 +451,8 @@ CThunkObject *_ctypes_alloc_callback(PyObject *callable, goto error; } - 
Py_INCREF(converters); - p->converters = converters; - Py_INCREF(callable); - p->callable = callable; + p->converters = Py_NewRef(converters); + p->callable = Py_NewRef(callable); return p; error: diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c index fa1dfac6c7d946..28b7cd4069718b 100644 --- a/Modules/_ctypes/callproc.c +++ b/Modules/_ctypes/callproc.c @@ -57,7 +57,6 @@ #ifndef Py_BUILD_CORE_BUILTIN # define Py_BUILD_CORE_MODULE 1 #endif -#define NEEDS_PY_IDENTIFIER #include "Python.h" #include "structmember.h" // PyMemberDef @@ -97,8 +96,12 @@ #define DONT_USE_SEH #endif +#include "pycore_runtime.h" // _PyRuntime +#include "pycore_global_objects.h" // _Py_ID() + #define CTYPES_CAPSULE_NAME_PYMEM "_ctypes pymem" + static void pymem_destructor(PyObject *ptr) { void *p = PyCapsule_GetPointer(ptr, CTYPES_CAPSULE_NAME_PYMEM); @@ -669,8 +672,7 @@ static int ConvParam(PyObject *obj, Py_ssize_t index, struct argument *pa) if (PyCArg_CheckExact(obj)) { PyCArgObject *carg = (PyCArgObject *)obj; pa->ffi_type = carg->pffi_type; - Py_INCREF(obj); - pa->keep = obj; + pa->keep = Py_NewRef(obj); memcpy(&pa->value, &carg->value, sizeof(pa->value)); return 0; } @@ -700,8 +702,7 @@ static int ConvParam(PyObject *obj, Py_ssize_t index, struct argument *pa) if (PyBytes_Check(obj)) { pa->ffi_type = &ffi_type_pointer; pa->value.p = PyBytes_AsString(obj); - Py_INCREF(obj); - pa->keep = obj; + pa->keep = Py_NewRef(obj); return 0; } @@ -719,9 +720,8 @@ static int ConvParam(PyObject *obj, Py_ssize_t index, struct argument *pa) } { - _Py_IDENTIFIER(_as_parameter_); PyObject *arg; - if (_PyObject_LookupAttrId(obj, &PyId__as_parameter_, &arg) < 0) { + if (_PyObject_LookupAttr(obj, &_Py_ID(_as_parameter_), &arg) < 0) { return -1; } /* Which types should we exactly allow here? @@ -832,7 +832,11 @@ static int _call_function_pointer(int flags, #endif # ifdef USING_APPLE_OS_LIBFFI +# ifdef HAVE_BUILTIN_AVAILABLE # define HAVE_FFI_PREP_CIF_VAR_RUNTIME __builtin_available(macos 10.15, ios 13, watchos 6, tvos 13, *) +# else +# define HAVE_FFI_PREP_CIF_VAR_RUNTIME (ffi_prep_cif_var != NULL) +# endif # elif HAVE_FFI_PREP_CIF_VAR # define HAVE_FFI_PREP_CIF_VAR_RUNTIME true # else @@ -1019,7 +1023,10 @@ void _ctypes_extend_error(PyObject *exc_class, const char *fmt, ...) 
PyErr_Fetch(&tp, &v, &tb); PyErr_NormalizeException(&tp, &v, &tb); - cls_str = PyObject_Str(tp); + if (PyType_Check(tp)) + cls_str = PyType_GetName((PyTypeObject *)tp); + else + cls_str = PyObject_Str(tp); if (cls_str) { PyUnicode_AppendAndDel(&s, cls_str); PyUnicode_AppendAndDel(&s, PyUnicode_FromString(": ")); @@ -1442,8 +1449,13 @@ copy_com_pointer(PyObject *self, PyObject *args) #else #ifdef __APPLE__ #ifdef HAVE_DYLD_SHARED_CACHE_CONTAINS_PATH -#define HAVE_DYLD_SHARED_CACHE_CONTAINS_PATH_RUNTIME \ - __builtin_available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, *) +# ifdef HAVE_BUILTIN_AVAILABLE +# define HAVE_DYLD_SHARED_CACHE_CONTAINS_PATH_RUNTIME \ + __builtin_available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, *) +# else +# define HAVE_DYLD_SHARED_CACHE_CONTAINS_PATH_RUNTIME \ + (_dyld_shared_cache_contains_path != NULL) +# endif #else // Support the deprecated case of compiling on an older macOS version static void *libsystem_b_handle; @@ -1729,8 +1741,7 @@ byref(PyObject *self, PyObject *args) parg->tag = 'P'; parg->pffi_type = &ffi_type_pointer; - Py_INCREF(obj); - parg->obj = obj; + parg->obj = Py_NewRef(obj); parg->value.p = (char *)((CDataObject *)obj)->b_ptr + offset; return (PyObject *)parg; } @@ -1770,8 +1781,7 @@ My_PyObj_FromPtr(PyObject *self, PyObject *args) if (PySys_Audit("ctypes.PyObj_FromPtr", "(O)", ob) < 0) { return NULL; } - Py_INCREF(ob); - return ob; + return Py_NewRef(ob); } static PyObject * @@ -1848,16 +1858,14 @@ static PyObject * unpickle(PyObject *self, PyObject *args) { PyObject *typ, *state, *meth, *obj, *result; - _Py_IDENTIFIER(__new__); - _Py_IDENTIFIER(__setstate__); if (!PyArg_ParseTuple(args, "OO!", &typ, &PyTuple_Type, &state)) return NULL; - obj = _PyObject_CallMethodIdOneArg(typ, &PyId___new__, typ); + obj = PyObject_CallMethodOneArg(typ, &_Py_ID(__new__), typ); if (obj == NULL) return NULL; - meth = _PyObject_GetAttrId(obj, &PyId___setstate__); + meth = PyObject_GetAttr(obj, &_Py_ID(__setstate__)); if (meth == NULL) { goto error; } @@ -1882,29 +1890,20 @@ POINTER(PyObject *self, PyObject *cls) PyObject *result; PyTypeObject *typ; PyObject *key; - char *buf; result = PyDict_GetItemWithError(_ctypes_ptrtype_cache, cls); if (result) { - Py_INCREF(result); - return result; + return Py_NewRef(result); } else if (PyErr_Occurred()) { return NULL; } if (PyUnicode_CheckExact(cls)) { - const char *name = PyUnicode_AsUTF8(cls); - if (name == NULL) - return NULL; - buf = PyMem_Malloc(strlen(name) + 3 + 1); - if (buf == NULL) - return PyErr_NoMemory(); - sprintf(buf, "LP_%s", name); + PyObject *name = PyUnicode_FromFormat("LP_%U", cls); result = PyObject_CallFunction((PyObject *)Py_TYPE(&PyCPointer_Type), - "s(O){}", - buf, + "N(O){}", + name, &PyCPointer_Type); - PyMem_Free(buf); if (result == NULL) return result; key = PyLong_FromVoidPtr(result); @@ -1914,20 +1913,15 @@ POINTER(PyObject *self, PyObject *cls) } } else if (PyType_Check(cls)) { typ = (PyTypeObject *)cls; - buf = PyMem_Malloc(strlen(typ->tp_name) + 3 + 1); - if (buf == NULL) - return PyErr_NoMemory(); - sprintf(buf, "LP_%s", typ->tp_name); + PyObject *name = PyUnicode_FromFormat("LP_%s", typ->tp_name); result = PyObject_CallFunction((PyObject *)Py_TYPE(&PyCPointer_Type), - "s(O){sO}", - buf, + "N(O){sO}", + name, &PyCPointer_Type, "_type_", cls); - PyMem_Free(buf); if (result == NULL) return result; - Py_INCREF(cls); - key = cls; + key = Py_NewRef(cls); } else { PyErr_SetString(PyExc_TypeError, "must be a ctypes type"); return NULL; diff --git a/Modules/_ctypes/cfield.c 
b/Modules/_ctypes/cfield.c index 13ed8b7eda6555..791aeba66539e9 100644 --- a/Modules/_ctypes/cfield.c +++ b/Modules/_ctypes/cfield.c @@ -144,8 +144,7 @@ PyCField_FromDesc(PyObject *desc, Py_ssize_t index, self->getfunc = getfunc; self->index = index; - Py_INCREF(proto); - self->proto = proto; + self->proto = Py_NewRef(proto); switch (fieldtype) { case NEW_BITFIELD: @@ -231,8 +230,7 @@ PyCField_get(CFieldObject *self, PyObject *inst, PyTypeObject *type) { CDataObject *src; if (inst == NULL) { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } if (!CDataObject_Check(inst)) { PyErr_SetString(PyExc_TypeError, @@ -1097,8 +1095,7 @@ O_get(void *ptr, Py_ssize_t size) "PyObject is NULL"); return NULL; } - Py_INCREF(ob); - return ob; + return Py_NewRef(ob); } static PyObject * @@ -1106,8 +1103,7 @@ O_set(void *ptr, PyObject *value, Py_ssize_t size) { /* Hm, does the memory block need it's own refcount or not? */ *(PyObject **)ptr = value; - Py_INCREF(value); - return value; + return Py_NewRef(value); } @@ -1233,8 +1229,7 @@ U_set(void *ptr, PyObject *value, Py_ssize_t length) return NULL; } - Py_INCREF(value); - return value; + return Py_NewRef(value); } @@ -1292,13 +1287,11 @@ z_set(void *ptr, PyObject *value, Py_ssize_t size) { if (value == Py_None) { *(char **)ptr = NULL; - Py_INCREF(value); - return value; + return Py_NewRef(value); } if (PyBytes_Check(value)) { *(const char **)ptr = PyBytes_AsString(value); - Py_INCREF(value); - return value; + return Py_NewRef(value); } else if (PyLong_Check(value)) { #if SIZEOF_VOID_P == SIZEOF_LONG_LONG *(char **)ptr = (char *)PyLong_AsUnsignedLongLongMask(value); @@ -1334,8 +1327,7 @@ Z_set(void *ptr, PyObject *value, Py_ssize_t size) if (value == Py_None) { *(wchar_t **)ptr = NULL; - Py_INCREF(value); - return value; + return Py_NewRef(value); } if (PyLong_Check(value)) { #if SIZEOF_VOID_P == SIZEOF_LONG_LONG diff --git a/Modules/_ctypes/ctypes.h b/Modules/_ctypes/ctypes.h index 88eb9f59922a04..a7029b6e6da2b8 100644 --- a/Modules/_ctypes/ctypes.h +++ b/Modules/_ctypes/ctypes.h @@ -26,6 +26,12 @@ #endif #endif +#if defined(__has_builtin) +#if __has_builtin(__builtin_available) +#define HAVE_BUILTIN_AVAILABLE 1 +#endif +#endif + typedef struct tagPyCArgObject PyCArgObject; typedef struct tagCDataObject CDataObject; typedef PyObject *(* GETFUNC)(void *, Py_ssize_t size); diff --git a/Modules/_ctypes/malloc_closure.c b/Modules/_ctypes/malloc_closure.c index d47153f1d7f3e8..3a859322772ba7 100644 --- a/Modules/_ctypes/malloc_closure.c +++ b/Modules/_ctypes/malloc_closure.c @@ -23,6 +23,7 @@ /* #define MALLOC_CLOSURE_DEBUG */ /* enable for some debugging output */ + /******************************************************************/ typedef union _tagITEM { @@ -96,7 +97,11 @@ void Py_ffi_closure_free(void *p) { #ifdef HAVE_FFI_CLOSURE_ALLOC #ifdef USING_APPLE_OS_LIBFFI +# ifdef HAVE_BUILTIN_AVAILABLE if (__builtin_available(macos 10.15, ios 13, watchos 6, tvos 13, *)) { +# else + if (ffi_closure_free != NULL) { +# endif #endif ffi_closure_free(p); return; @@ -114,7 +119,11 @@ void *Py_ffi_closure_alloc(size_t size, void** codeloc) { #ifdef HAVE_FFI_CLOSURE_ALLOC #ifdef USING_APPLE_OS_LIBFFI +# ifdef HAVE_BUILTIN_AVAILABLE if (__builtin_available(macos 10.15, ios 13, watchos 6, tvos 13, *)) { +# else + if (ffi_closure_alloc != NULL) { +# endif #endif return ffi_closure_alloc(size, codeloc); #ifdef USING_APPLE_OS_LIBFFI diff --git a/Modules/_ctypes/stgdict.c b/Modules/_ctypes/stgdict.c index a819ce910d4b52..099331ca8bdb9c 100644 --- 
a/Modules/_ctypes/stgdict.c +++ b/Modules/_ctypes/stgdict.c @@ -1,7 +1,6 @@ #ifndef Py_BUILD_CORE_BUILTIN # define Py_BUILD_CORE_MODULE 1 #endif -#define NEEDS_PY_IDENTIFIER #include "Python.h" // windows.h must be included before pycore internal headers @@ -268,8 +267,7 @@ MakeFields(PyObject *type, CFieldObject *descr, new_descr->size = fdescr->size; new_descr->offset = fdescr->offset + offset; new_descr->index = fdescr->index + index; - new_descr->proto = fdescr->proto; - Py_XINCREF(new_descr->proto); + new_descr->proto = Py_XNewRef(fdescr->proto); new_descr->getfunc = fdescr->getfunc; new_descr->setfunc = fdescr->setfunc; @@ -291,12 +289,11 @@ MakeFields(PyObject *type, CFieldObject *descr, static int MakeAnonFields(PyObject *type) { - _Py_IDENTIFIER(_anonymous_); PyObject *anon; PyObject *anon_names; Py_ssize_t i; - if (_PyObject_LookupAttrId(type, &PyId__anonymous_, &anon) < 0) { + if (_PyObject_LookupAttr(type, &_Py_ID(_anonymous_), &anon) < 0) { return -1; } if (anon == NULL) { @@ -347,9 +344,6 @@ MakeAnonFields(PyObject *type) int PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct) { - _Py_IDENTIFIER(_swappedbytes_); - _Py_IDENTIFIER(_use_broken_old_ctypes_structure_semantics_); - _Py_IDENTIFIER(_pack_); StgDictObject *stgdict, *basedict; Py_ssize_t len, offset, size, align, i; Py_ssize_t union_size, total_align; @@ -362,18 +356,10 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct int big_endian; int arrays_seen = 0; - /* HACK Alert: I cannot be bothered to fix ctypes.com, so there has to - be a way to use the old, broken semantics: _fields_ are not extended - but replaced in subclasses. - - XXX Remove this in ctypes 1.0! - */ - int use_broken_old_ctypes_semantics; - if (fields == NULL) return 0; - if (_PyObject_LookupAttrId(type, &PyId__swappedbytes_, &tmp) < 0) { + if (_PyObject_LookupAttr(type, &_Py_ID(_swappedbytes_), &tmp) < 0) { return -1; } if (tmp) { @@ -384,20 +370,7 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct big_endian = PY_BIG_ENDIAN; } - if (_PyObject_LookupAttrId(type, - &PyId__use_broken_old_ctypes_structure_semantics_, &tmp) < 0) - { - return -1; - } - if (tmp) { - Py_DECREF(tmp); - use_broken_old_ctypes_semantics = 1; - } - else { - use_broken_old_ctypes_semantics = 0; - } - - if (_PyObject_LookupAttrId(type, &PyId__pack_, &tmp) < 0) { + if (_PyObject_LookupAttr(type, &_Py_ID(_pack_), &tmp) < 0) { return -1; } if (tmp) { @@ -430,8 +403,11 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct } stgdict = PyType_stgdict(type); - if (!stgdict) + if (!stgdict) { + PyErr_SetString(PyExc_TypeError, + "ctypes state is not initialized"); return -1; + } /* If this structure/union is already marked final we cannot assign _fields_ anymore. 
*/ @@ -457,7 +433,7 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct if (!isStruct) { stgdict->flags |= TYPEFLAG_HASUNION; } - if (basedict && !use_broken_old_ctypes_semantics) { + if (basedict) { size = offset = basedict->size; align = basedict->align; union_size = 0; diff --git a/Modules/_curses_panel.c b/Modules/_curses_panel.c index 0b328f9665a0a2..2144345de01ba3 100644 --- a/Modules/_curses_panel.c +++ b/Modules/_curses_panel.c @@ -261,8 +261,7 @@ PyCursesPanel_New(_curses_panel_state *state, PANEL *pan, Py_DECREF(po); return NULL; } - po->wo = wo; - Py_INCREF(wo); + po->wo = (PyCursesWindowObject*)Py_NewRef(wo); return (PyObject *)po; } @@ -313,8 +312,7 @@ _curses_panel_panel_above_impl(PyCursesPanelObject *self) "panel_above: can't find Panel Object"); return NULL; } - Py_INCREF(po); - return (PyObject *)po; + return Py_NewRef(po); } /* panel_below(NULL) returns the top panel in the stack. To get @@ -344,8 +342,7 @@ _curses_panel_panel_below_impl(PyCursesPanelObject *self) "panel_below: can't find Panel Object"); return NULL; } - Py_INCREF(po); - return (PyObject *)po; + return Py_NewRef(po); } /*[clinic input] @@ -394,8 +391,7 @@ static PyObject * _curses_panel_panel_window_impl(PyCursesPanelObject *self) /*[clinic end generated code: output=5f05940d4106b4cb input=6067353d2c307901]*/ { - Py_INCREF(self->wo); - return (PyObject *)self->wo; + return Py_NewRef(self->wo); } /*[clinic input] @@ -428,8 +424,7 @@ _curses_panel_panel_replace_impl(PyCursesPanelObject *self, PyErr_SetString(state->PyCursesError, "replace_panel() returned ERR"); return NULL; } - Py_INCREF(win); - Py_SETREF(po->wo, win); + Py_SETREF(po->wo, (PyCursesWindowObject*)Py_NewRef(win)); Py_RETURN_NONE; } @@ -486,8 +481,7 @@ _curses_panel_panel_userptr_impl(PyCursesPanelObject *self, return NULL; } - Py_INCREF(obj); - return obj; + return Py_NewRef(obj); } @@ -555,8 +549,7 @@ _curses_panel_bottom_panel_impl(PyObject *module) "panel_above: can't find Panel Object"); return NULL; } - Py_INCREF(po); - return (PyObject *)po; + return Py_NewRef(po); } /*[clinic input] @@ -614,8 +607,7 @@ _curses_panel_top_panel_impl(PyObject *module) "panel_below: can't find Panel Object"); return NULL; } - Py_INCREF(po); - return (PyObject *)po; + return Py_NewRef(po); } /*[clinic input] @@ -670,8 +662,7 @@ _curses_panel_exec(PyObject *mod) state->PyCursesError = PyErr_NewException( "_curses_panel.error", NULL, NULL); - Py_INCREF(state->PyCursesError); - if (PyModule_AddObject(mod, "error", state->PyCursesError) < 0) { + if (PyModule_AddObject(mod, "error", Py_NewRef(state->PyCursesError)) < 0) { Py_DECREF(state->PyCursesError); return -1; } diff --git a/Modules/_cursesmodule.c b/Modules/_cursesmodule.c index c10b2b302c6024..5691a419a32f8e 100644 --- a/Modules/_cursesmodule.c +++ b/Modules/_cursesmodule.c @@ -103,7 +103,6 @@ static const char PyCursesVersion[] = "2.2"; #ifndef Py_BUILD_CORE_BUILTIN # define Py_BUILD_CORE_MODULE 1 #endif -#define NEEDS_PY_IDENTIFIER #define PY_SSIZE_T_CLEAN @@ -390,8 +389,7 @@ PyCurses_ConvertToString(PyCursesWindowObject *win, PyObject *obj, #endif } else if (PyBytes_Check(obj)) { - Py_INCREF(obj); - *bytes = obj; + *bytes = Py_NewRef(obj); /* check for embedded null bytes */ if (PyBytes_AsStringAndSize(*bytes, &str, NULL) < 0) { Py_DECREF(obj); @@ -2177,12 +2175,11 @@ _curses_window_putwin(PyCursesWindowObject *self, PyObject *file) while (1) { char buf[BUFSIZ]; Py_ssize_t n = fread(buf, 1, BUFSIZ, fp); - _Py_IDENTIFIER(write); if (n <= 0) break; Py_DECREF(res); - res = 
_PyObject_CallMethodId(file, &PyId_write, "y#", buf, n); + res = PyObject_CallMethod(file, "write", "y#", buf, n); if (res == NULL) break; } @@ -2374,7 +2371,7 @@ _curses.window.touchline start: int count: int [ - changed: bool(accept={int}) = True + changed: bool = True ] / @@ -2387,7 +2384,7 @@ as having been changed (changed=True) or unchanged (changed=False). static PyObject * _curses_window_touchline_impl(PyCursesWindowObject *self, int start, int count, int group_right_1, int changed) -/*[clinic end generated code: output=65d05b3f7438c61d input=918ad1cbdadf93ea]*/ +/*[clinic end generated code: output=65d05b3f7438c61d input=a98aa4f79b6be845]*/ { if (!group_right_1) { return PyCursesCheckERR(touchline(self->win, start, count), "touchline"); @@ -2709,7 +2706,7 @@ NoArgTrueFalseFunctionBody(can_change_color) /*[clinic input] _curses.cbreak - flag: bool(accept={int}) = True + flag: bool = True If false, the effect is the same as calling nocbreak(). / @@ -2724,7 +2721,7 @@ Calling first raw() then cbreak() leaves the terminal in cbreak mode. static PyObject * _curses_cbreak_impl(PyObject *module, int flag) -/*[clinic end generated code: output=9f9dee9664769751 input=150be619eb1f1458]*/ +/*[clinic end generated code: output=9f9dee9664769751 input=c7d0bddda93016c1]*/ NoArgOrFlagNoReturnFunctionBody(cbreak, flag) /*[clinic input] @@ -2873,7 +2870,7 @@ NoArgNoReturnFunctionBody(doupdate) /*[clinic input] _curses.echo - flag: bool(accept={int}) = True + flag: bool = True If false, the effect is the same as calling noecho(). / @@ -2884,7 +2881,7 @@ In echo mode, each character input is echoed to the screen as it is entered. static PyObject * _curses_echo_impl(PyObject *module, int flag) -/*[clinic end generated code: output=03acb2ddfa6c8729 input=2e9e891d637eac5d]*/ +/*[clinic end generated code: output=03acb2ddfa6c8729 input=86cd4d5bb1d569c0]*/ NoArgOrFlagNoReturnFunctionBody(echo, flag) /*[clinic input] @@ -3051,7 +3048,6 @@ _curses_getwin(PyObject *module, PyObject *file) PyObject *data; size_t datalen; WINDOW *win; - _Py_IDENTIFIER(read); PyObject *res = NULL; PyCursesInitialised; @@ -3063,7 +3059,7 @@ _curses_getwin(PyObject *module, PyObject *file) if (_Py_set_inheritable(fileno(fp), 0, NULL) < 0) goto error; - data = _PyObject_CallMethodIdNoArgs(file, &PyId_read); + data = PyObject_CallMethod(file, "read", NULL); if (data == NULL) goto error; if (!PyBytes_Check(data)) { @@ -3500,14 +3496,14 @@ _curses_set_tabsize_impl(PyObject *module, int size) /*[clinic input] _curses.intrflush - flag: bool(accept={int}) + flag: bool / [clinic start generated code]*/ static PyObject * _curses_intrflush_impl(PyObject *module, int flag) -/*[clinic end generated code: output=c1986df35e999a0f input=fcba57bb28dfd795]*/ +/*[clinic end generated code: output=c1986df35e999a0f input=c65fe2ef973fe40a]*/ { PyCursesInitialised; @@ -3609,7 +3605,7 @@ NoArgReturnStringFunctionBody(longname) /*[clinic input] _curses.meta - yes: bool(accept={int}) + yes: bool / Enable/disable meta keys. @@ -3620,7 +3616,7 @@ allow only 7-bit characters. static PyObject * _curses_meta_impl(PyObject *module, int yes) -/*[clinic end generated code: output=22f5abda46a605d8 input=af9892e3a74f35db]*/ +/*[clinic end generated code: output=22f5abda46a605d8 input=cfe7da79f51d0e30]*/ { PyCursesInitialised; @@ -3770,7 +3766,7 @@ _curses_newwin_impl(PyObject *module, int nlines, int ncols, /*[clinic input] _curses.nl - flag: bool(accept={int}) = True + flag: bool = True If false, the effect is the same as calling nonl(). 
/ @@ -3782,7 +3778,7 @@ newline into return and line-feed on output. Newline mode is initially on. static PyObject * _curses_nl_impl(PyObject *module, int flag) -/*[clinic end generated code: output=b39cc0ffc9015003 input=cf36a63f7b86e28a]*/ +/*[clinic end generated code: output=b39cc0ffc9015003 input=18e3e9c6e8cfcf6f]*/ NoArgOrFlagNoReturnFunctionBody(nl, flag) /*[clinic input] @@ -3929,7 +3925,7 @@ _curses_putp_impl(PyObject *module, const char *string) /*[clinic input] _curses.qiflush - flag: bool(accept={int}) = True + flag: bool = True If false, the effect is the same as calling noqiflush(). / @@ -3941,7 +3937,7 @@ will be flushed when the INTR, QUIT and SUSP characters are read. static PyObject * _curses_qiflush_impl(PyObject *module, int flag) -/*[clinic end generated code: output=9167e862f760ea30 input=e9e4a389946a0dbc]*/ +/*[clinic end generated code: output=9167e862f760ea30 input=6ec8b3e2b717ec40]*/ { PyCursesInitialised; @@ -3962,8 +3958,6 @@ update_lines_cols(void) { PyObject *o; PyObject *m = PyImport_ImportModule("curses"); - _Py_IDENTIFIER(LINES); - _Py_IDENTIFIER(COLS); if (!m) return 0; @@ -3973,13 +3967,12 @@ update_lines_cols(void) Py_DECREF(m); return 0; } - if (_PyObject_SetAttrId(m, &PyId_LINES, o)) { + if (PyObject_SetAttrString(m, "LINES", o)) { Py_DECREF(m); Py_DECREF(o); return 0; } - /* PyId_LINES.object will be initialized here. */ - if (PyDict_SetItem(ModDict, _PyUnicode_FromId(&PyId_LINES), o)) { + if (PyDict_SetItemString(ModDict, "LINES", o)) { Py_DECREF(m); Py_DECREF(o); return 0; @@ -3990,12 +3983,12 @@ update_lines_cols(void) Py_DECREF(m); return 0; } - if (_PyObject_SetAttrId(m, &PyId_COLS, o)) { + if (PyObject_SetAttrString(m, "COLS", o)) { Py_DECREF(m); Py_DECREF(o); return 0; } - if (PyDict_SetItem(ModDict, _PyUnicode_FromId(&PyId_COLS), o)) { + if (PyDict_SetItemString(ModDict, "COLS", o)) { Py_DECREF(m); Py_DECREF(o); return 0; @@ -4025,7 +4018,7 @@ _curses_update_lines_cols_impl(PyObject *module) /*[clinic input] _curses.raw - flag: bool(accept={int}) = True + flag: bool = True If false, the effect is the same as calling noraw(). / @@ -4038,7 +4031,7 @@ curses input functions one by one. static PyObject * _curses_raw_impl(PyObject *module, int flag) -/*[clinic end generated code: output=a750e4b342be015b input=e36d8db27832b848]*/ +/*[clinic end generated code: output=a750e4b342be015b input=4b447701389fb4df]*/ NoArgOrFlagNoReturnFunctionBody(raw, flag) /*[clinic input] @@ -4510,7 +4503,7 @@ _curses_unget_wch(PyObject *module, PyObject *ch) /*[clinic input] _curses.use_env - flag: bool(accept={int}) + flag: bool / Use environment variables LINES and COLUMNS. @@ -4527,7 +4520,7 @@ not set). 
static PyObject * _curses_use_env_impl(PyObject *module, int flag) -/*[clinic end generated code: output=b2c445e435c0b164 input=1778eb1e9151ea37]*/ +/*[clinic end generated code: output=b2c445e435c0b164 input=06ac30948f2d78e4]*/ { use_env(flag); Py_RETURN_NONE; diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c index d86418af0dc1a8..eda8c5610ba659 100644 --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -10,7 +10,6 @@ #ifndef Py_BUILD_CORE_BUILTIN # define Py_BUILD_CORE_MODULE 1 #endif -#define NEEDS_PY_IDENTIFIER #include "Python.h" #include "pycore_long.h" // _PyLong_GetOne() @@ -141,10 +140,6 @@ static PyTypeObject PyDateTime_TimeZoneType; static int check_tzinfo_subclass(PyObject *p); -_Py_IDENTIFIER(as_integer_ratio); -_Py_IDENTIFIER(fromutc); -_Py_IDENTIFIER(isoformat); -_Py_IDENTIFIER(strftime); /* --------------------------------------------------------------------------- * Math utilities. @@ -194,8 +189,7 @@ divide_nearest(PyObject *m, PyObject *n) temp = _PyLong_DivmodNear(m, n); if (temp == NULL) return NULL; - result = PyTuple_GET_ITEM(temp, 0); - Py_INCREF(result); + result = Py_NewRef(PyTuple_GET_ITEM(temp, 0)); Py_DECREF(temp); return result; @@ -1010,8 +1004,7 @@ new_datetime_ex2(int year, int month, int day, int hour, int minute, DATE_SET_SECOND(self, second); DATE_SET_MICROSECOND(self, usecond); if (aware) { - Py_INCREF(tzinfo); - self->tzinfo = tzinfo; + self->tzinfo = Py_NewRef(tzinfo); } DATE_SET_FOLD(self, fold); } @@ -1088,8 +1081,7 @@ new_time_ex2(int hour, int minute, int second, int usecond, TIME_SET_SECOND(self, second); TIME_SET_MICROSECOND(self, usecond); if (aware) { - Py_INCREF(tzinfo); - self->tzinfo = tzinfo; + self->tzinfo = Py_NewRef(tzinfo); } TIME_SET_FOLD(self, fold); } @@ -1170,10 +1162,8 @@ create_timezone(PyObject *offset, PyObject *name) if (self == NULL) { return NULL; } - Py_INCREF(offset); - self->offset = offset; - Py_XINCREF(name); - self->name = name; + self->offset = Py_NewRef(offset); + self->name = Py_XNewRef(name); return (PyObject *)self; } @@ -1187,8 +1177,7 @@ new_timezone(PyObject *offset, PyObject *name) assert(name == NULL || PyUnicode_Check(name)); if (name == NULL && delta_bool((PyDateTime_Delta *)offset) == 0) { - Py_INCREF(PyDateTime_TimeZone_UTC); - return PyDateTime_TimeZone_UTC; + return Py_NewRef(PyDateTime_TimeZone_UTC); } if ((GET_TD_DAYS(offset) == -1 && GET_TD_SECONDS(offset) == 0 && @@ -1323,8 +1312,6 @@ static PyObject * call_tzname(PyObject *tzinfo, PyObject *tzinfoarg) { PyObject *result; - _Py_IDENTIFIER(tzname); - assert(tzinfo != NULL); assert(check_tzinfo_subclass(tzinfo) >= 0); assert(tzinfoarg != NULL); @@ -1332,7 +1319,7 @@ call_tzname(PyObject *tzinfo, PyObject *tzinfoarg) if (tzinfo == Py_None) Py_RETURN_NONE; - result = _PyObject_CallMethodIdOneArg(tzinfo, &PyId_tzname, tzinfoarg); + result = PyObject_CallMethodOneArg(tzinfo, &_Py_ID(tzname), tzinfoarg); if (result == NULL || result == Py_None) return result; @@ -1341,8 +1328,7 @@ call_tzname(PyObject *tzinfo, PyObject *tzinfoarg) PyErr_Format(PyExc_TypeError, "tzinfo.tzname() must " "return None or a string, not '%s'", Py_TYPE(result)->tp_name); - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); } return result; @@ -1404,8 +1390,7 @@ tzinfo_from_isoformat_results(int rv, int tzoffset, int tz_useconds) if (rv == 1) { // Create a timezone from offset in seconds (0 returns UTC) if (tzoffset == 0) { - Py_INCREF(PyDateTime_TimeZone_UTC); - return PyDateTime_TimeZone_UTC; + return Py_NewRef(PyDateTime_TimeZone_UTC); 
} PyObject *delta = new_delta(0, tzoffset, tz_useconds, 1); @@ -1416,8 +1401,7 @@ tzinfo_from_isoformat_results(int rv, int tzoffset, int tz_useconds) Py_DECREF(delta); } else { - tzinfo = Py_None; - Py_INCREF(Py_None); + tzinfo = Py_NewRef(Py_None); } return tzinfo; @@ -1515,7 +1499,7 @@ make_somezreplacement(PyObject *object, char *sep, PyObject *tzinfoarg) if (tzinfo == Py_None || tzinfo == NULL) { return PyBytes_FromStringAndSize(NULL, 0); } - + assert(tzinfoarg != NULL); if (format_utcoffset(buf, sizeof(buf), @@ -1523,7 +1507,7 @@ make_somezreplacement(PyObject *object, char *sep, PyObject *tzinfoarg) tzinfo, tzinfoarg) < 0) return NULL; - + return PyBytes_FromStringAndSize(buf, strlen(buf)); } @@ -1533,7 +1517,6 @@ make_Zreplacement(PyObject *object, PyObject *tzinfoarg) PyObject *temp; PyObject *tzinfo = get_tzinfo_member(object); PyObject *Zreplacement = PyUnicode_FromStringAndSize(NULL, 0); - _Py_IDENTIFIER(replace); if (Zreplacement == NULL) return NULL; @@ -1555,7 +1538,7 @@ make_Zreplacement(PyObject *object, PyObject *tzinfoarg) * strftime doesn't treat them as format codes. */ Py_DECREF(Zreplacement); - Zreplacement = _PyObject_CallMethodId(temp, &PyId_replace, "ss", "%", "%%"); + Zreplacement = PyObject_CallMethod(temp, "replace", "ss", "%", "%%"); Py_DECREF(temp); if (Zreplacement == NULL) return NULL; @@ -1865,8 +1848,7 @@ delta_to_microseconds(PyDateTime_Delta *self) x2 = PyNumber_Multiply(x1, seconds_per_day); /* days in seconds */ if (x2 == NULL) goto Done; - Py_DECREF(x1); - x1 = NULL; + Py_SETREF(x1, NULL); /* x2 has days in seconds */ x1 = PyLong_FromLong(GET_TD_SECONDS(self)); /* seconds */ @@ -1883,8 +1865,7 @@ delta_to_microseconds(PyDateTime_Delta *self) x1 = PyNumber_Multiply(x3, us_per_second); /* us */ if (x1 == NULL) goto Done; - Py_DECREF(x3); - x3 = NULL; + Py_SETREF(x3, NULL); /* x1 has days+seconds in us */ x2 = PyLong_FromLong(GET_TD_MICROSECONDS(self)); @@ -1951,8 +1932,7 @@ microseconds_to_delta_ex(PyObject *pyus, PyTypeObject *type) goto BadDivmod; } - num = PyTuple_GET_ITEM(tuple, 0); /* leftover seconds */ - Py_INCREF(num); + num = Py_NewRef(PyTuple_GET_ITEM(tuple, 0)); /* leftover seconds */ Py_DECREF(tuple); tuple = checked_divmod(num, seconds_per_day); @@ -1970,8 +1950,7 @@ microseconds_to_delta_ex(PyObject *pyus, PyTypeObject *type) goto BadDivmod; } - num = PyTuple_GET_ITEM(tuple, 0); /* leftover days */ - Py_INCREF(num); + num = Py_NewRef(PyTuple_GET_ITEM(tuple, 0)); /* leftover days */ d = _PyLong_AsInt(num); if (d == -1 && PyErr_Occurred()) { goto Done; @@ -2019,7 +1998,7 @@ get_float_as_integer_ratio(PyObject *floatobj) PyObject *ratio; assert(floatobj && PyFloat_Check(floatobj)); - ratio = _PyObject_CallMethodIdNoArgs(floatobj, &PyId_as_integer_ratio); + ratio = PyObject_CallMethodNoArgs(floatobj, &_Py_ID(as_integer_ratio)); if (ratio == NULL) { return NULL; } @@ -2056,8 +2035,7 @@ multiply_truedivide_timedelta_float(PyDateTime_Delta *delta, PyObject *floatobj, goto error; } temp = PyNumber_Multiply(pyus_in, PyTuple_GET_ITEM(ratio, op)); - Py_DECREF(pyus_in); - pyus_in = NULL; + Py_SETREF(pyus_in, NULL); if (temp == NULL) goto error; pyus_out = divide_nearest(temp, PyTuple_GET_ITEM(ratio, !op)); @@ -2982,8 +2960,6 @@ date_today(PyObject *cls, PyObject *dummy) { PyObject *time; PyObject *result; - _Py_IDENTIFIER(fromtimestamp); - time = time_time(); if (time == NULL) return NULL; @@ -2994,7 +2970,7 @@ date_today(PyObject *cls, PyObject *dummy) * time.time() delivers; if someone were gonzo about optimization, * date.today() could get away 
with plain C time(). */ - result = _PyObject_CallMethodIdOneArg(cls, &PyId_fromtimestamp, time); + result = PyObject_CallMethodOneArg(cls, &_Py_ID(fromtimestamp), time); Py_DECREF(time); return result; } @@ -3245,7 +3221,7 @@ date_isoformat(PyDateTime_Date *self, PyObject *Py_UNUSED(ignored)) static PyObject * date_str(PyDateTime_Date *self) { - return _PyObject_CallMethodIdNoArgs((PyObject *)self, &PyId_isoformat); + return PyObject_CallMethodNoArgs((PyObject *)self, &_Py_ID(isoformat)); } @@ -3264,14 +3240,13 @@ date_strftime(PyDateTime_Date *self, PyObject *args, PyObject *kw) PyObject *result; PyObject *tuple; PyObject *format; - _Py_IDENTIFIER(timetuple); static char *keywords[] = {"format", NULL}; if (! PyArg_ParseTupleAndKeywords(args, kw, "U:strftime", keywords, &format)) return NULL; - tuple = _PyObject_CallMethodIdNoArgs((PyObject *)self, &PyId_timetuple); + tuple = PyObject_CallMethodNoArgs((PyObject *)self, &_Py_ID(timetuple)); if (tuple == NULL) return NULL; result = wrap_strftime((PyObject *)self, format, tuple, @@ -3292,7 +3267,7 @@ date_format(PyDateTime_Date *self, PyObject *args) if (PyUnicode_GetLength(format) == 0) return PyObject_Str((PyObject *)self); - return _PyObject_CallMethodIdOneArg((PyObject *)self, &PyId_strftime, + return PyObject_CallMethodOneArg((PyObject *)self, &_Py_ID(strftime), format); } @@ -3357,8 +3332,7 @@ iso_calendar_date_year(PyDateTime_IsoCalendarDate *self, void *unused) if (year == NULL) { return NULL; } - Py_INCREF(year); - return year; + return Py_NewRef(year); } static PyObject * @@ -3368,8 +3342,7 @@ iso_calendar_date_week(PyDateTime_IsoCalendarDate *self, void *unused) if (week == NULL) { return NULL; } - Py_INCREF(week); - return week; + return Py_NewRef(week); } static PyObject * @@ -3379,8 +3352,7 @@ iso_calendar_date_weekday(PyDateTime_IsoCalendarDate *self, void *unused) if (weekday == NULL) { return NULL; } - Py_INCREF(weekday); - return weekday; + return Py_NewRef(weekday); } static PyGetSetDef iso_calendar_date_getset[] = { @@ -3821,9 +3793,8 @@ tzinfo_reduce(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *args, *state; PyObject *getinitargs; - _Py_IDENTIFIER(__getinitargs__); - if (_PyObject_LookupAttrId(self, &PyId___getinitargs__, &getinitargs) < 0) { + if (_PyObject_LookupAttr(self, &_Py_ID(__getinitargs__), &getinitargs) < 0) { return NULL; } if (getinitargs != NULL) { @@ -3992,8 +3963,7 @@ timezone_str(PyDateTime_TimeZone *self) char sign; if (self->name != NULL) { - Py_INCREF(self->name); - return self->name; + return Py_NewRef(self->name); } if ((PyObject *)self == PyDateTime_TimeZone_UTC || (GET_TD_DAYS(self->offset) == 0 && @@ -4009,8 +3979,7 @@ timezone_str(PyDateTime_TimeZone *self) } else { sign = '+'; - offset = self->offset; - Py_INCREF(offset); + offset = Py_NewRef(self->offset); } /* Offset is not negative here. */ microseconds = GET_TD_MICROSECONDS(offset); @@ -4045,8 +4014,7 @@ timezone_utcoffset(PyDateTime_TimeZone *self, PyObject *dt) if (_timezone_check_argument(dt, "utcoffset") == -1) return NULL; - Py_INCREF(self->offset); - return self->offset; + return Py_NewRef(self->offset); } static PyObject * @@ -4183,8 +4151,7 @@ static PyObject * time_tzinfo(PyDateTime_Time *self, void *unused) { PyObject *result = HASTZINFO(self) ? 
self->tzinfo : Py_None; - Py_INCREF(result); - return result; + return Py_NewRef(result); } static PyObject * @@ -4229,8 +4196,7 @@ time_from_pickle(PyTypeObject *type, PyObject *state, PyObject *tzinfo) me->hashcode = -1; me->hastzinfo = aware; if (aware) { - Py_INCREF(tzinfo); - me->tzinfo = tzinfo; + me->tzinfo = Py_NewRef(tzinfo); } if (pdata[0] & (1 << 7)) { me->data[0] -= 128; @@ -4368,7 +4334,7 @@ time_repr(PyDateTime_Time *self) static PyObject * time_str(PyDateTime_Time *self) { - return _PyObject_CallMethodIdNoArgs((PyObject *)self, &PyId_isoformat); + return PyObject_CallMethodNoArgs((PyObject *)self, &_Py_ID(isoformat)); } static PyObject * @@ -4526,12 +4492,10 @@ time_richcompare(PyObject *self, PyObject *other, int op) result = diff_to_bool(diff, op); } else if (op == Py_EQ) { - result = Py_False; - Py_INCREF(result); + result = Py_NewRef(Py_False); } else if (op == Py_NE) { - result = Py_True; - Py_INCREF(result); + result = Py_NewRef(Py_True); } else { PyErr_SetString(PyExc_TypeError, @@ -4560,8 +4524,7 @@ time_hash(PyDateTime_Time *self) return -1; } else { - self0 = (PyObject *)self; - Py_INCREF(self0); + self0 = Py_NewRef(self); } offset = time_utcoffset(self0, NULL); Py_DECREF(self0); @@ -4858,8 +4821,7 @@ static PyObject * datetime_tzinfo(PyDateTime_DateTime *self, void *unused) { PyObject *result = HASTZINFO(self) ? self->tzinfo : Py_None; - Py_INCREF(result); - return result; + return Py_NewRef(result); } static PyObject * @@ -4906,8 +4868,7 @@ datetime_from_pickle(PyTypeObject *type, PyObject *state, PyObject *tzinfo) me->hashcode = -1; me->hastzinfo = aware; if (aware) { - Py_INCREF(tzinfo); - me->tzinfo = tzinfo; + me->tzinfo = Py_NewRef(tzinfo); } if (pdata[2] & (1 << 7)) { me->data[2] -= 128; @@ -5170,7 +5131,9 @@ datetime_datetime_now_impl(PyTypeObject *type, PyObject *tz) tz); if (self != NULL && tz != Py_None) { /* Convert UTC to tzinfo's zone. */ - self = _PyObject_CallMethodId(tz, &PyId_fromutc, "N", self); + PyObject *res = PyObject_CallMethodOneArg(tz, &_Py_ID(fromutc), self); + Py_DECREF(self); + return res; } return self; } @@ -5206,7 +5169,9 @@ datetime_fromtimestamp(PyObject *cls, PyObject *args, PyObject *kw) tzinfo); if (self != NULL && tzinfo != Py_None) { /* Convert UTC to tzinfo's zone. 
*/ - self = _PyObject_CallMethodId(tzinfo, &PyId_fromutc, "N", self); + PyObject *res = PyObject_CallMethodOneArg(tzinfo, &_Py_ID(fromutc), self); + Py_DECREF(self); + return res; } return self; } @@ -5230,7 +5195,6 @@ datetime_strptime(PyObject *cls, PyObject *args) { static PyObject *module = NULL; PyObject *string, *format; - _Py_IDENTIFIER(_strptime_datetime); if (!PyArg_ParseTuple(args, "UU:strptime", &string, &format)) return NULL; @@ -5240,7 +5204,7 @@ datetime_strptime(PyObject *cls, PyObject *args) if (module == NULL) return NULL; } - return _PyObject_CallMethodIdObjArgs(module, &PyId__strptime_datetime, + return PyObject_CallMethodObjArgs(module, &_Py_ID(_strptime_datetime), cls, string, format, NULL); } @@ -5316,8 +5280,7 @@ _sanitize_isoformat_str(PyObject *dtstr) } if (surrogate_separator == 0) { - Py_INCREF(dtstr); - return dtstr; + return Py_NewRef(dtstr); } PyObject *str_out = _PyUnicode_Copy(dtstr); @@ -5631,9 +5594,8 @@ datetime_subtract(PyObject *left, PyObject *right) int delta_d, delta_s, delta_us; if (GET_DT_TZINFO(left) == GET_DT_TZINFO(right)) { - offset2 = offset1 = Py_None; - Py_INCREF(offset1); - Py_INCREF(offset2); + offset1 = Py_NewRef(Py_None); + offset2 = Py_NewRef(Py_None); } else { offset1 = datetime_utcoffset(left, NULL); @@ -5747,7 +5709,14 @@ datetime_repr(PyDateTime_DateTime *self) static PyObject * datetime_str(PyDateTime_DateTime *self) { - return _PyObject_CallMethodId((PyObject *)self, &PyId_isoformat, "s", " "); + PyObject *space = PyUnicode_FromString(" "); + if (space == NULL) { + return NULL; + } + PyObject *res = PyObject_CallMethodOneArg((PyObject *)self, + &_Py_ID(isoformat), space); + Py_DECREF(space); + return res; } static PyObject * @@ -5971,12 +5940,10 @@ datetime_richcompare(PyObject *self, PyObject *other, int op) result = diff_to_bool(diff, op); } else if (op == Py_EQ) { - result = Py_False; - Py_INCREF(result); + result = Py_NewRef(Py_False); } else if (op == Py_NE) { - result = Py_True; - Py_INCREF(result); + result = Py_NewRef(Py_True); } else { PyErr_SetString(PyExc_TypeError, @@ -6008,8 +5975,7 @@ datetime_hash(PyDateTime_DateTime *self) return -1; } else { - self0 = (PyObject *)self; - Py_INCREF(self0); + self0 = Py_NewRef(self); } offset = datetime_utcoffset(self0, NULL); Py_DECREF(self0); @@ -6226,15 +6192,13 @@ datetime_astimezone(PyDateTime_DateTime *self, PyObject *args, PyObject *kw) if (self_tzinfo == NULL) return NULL; } else { - self_tzinfo = self->tzinfo; - Py_INCREF(self_tzinfo); + self_tzinfo = Py_NewRef(self->tzinfo); } /* Conversion to self's own time zone is a NOP. */ if (self_tzinfo == tzinfo) { Py_DECREF(self_tzinfo); - Py_INCREF(self); - return self; + return (PyDateTime_DateTime*)Py_NewRef(self); } /* Convert self to UTC. */ @@ -6279,14 +6243,10 @@ datetime_astimezone(PyDateTime_DateTime *self, PyObject *args, PyObject *kw) } else { /* Result is already aware - just replace tzinfo. */ - temp = result->tzinfo; - result->tzinfo = PyDateTime_TimeZone_UTC; - Py_INCREF(result->tzinfo); - Py_DECREF(temp); + Py_SETREF(result->tzinfo, Py_NewRef(PyDateTime_TimeZone_UTC)); } /* Attach new tzinfo and let fromutc() do the rest. 
*/ - temp = result->tzinfo; if (tzinfo == Py_None) { tzinfo = local_timezone(result); if (tzinfo == NULL) { @@ -6296,12 +6256,11 @@ datetime_astimezone(PyDateTime_DateTime *self, PyObject *args, PyObject *kw) } else Py_INCREF(tzinfo); - result->tzinfo = tzinfo; - Py_DECREF(temp); + Py_SETREF(result->tzinfo, tzinfo); temp = (PyObject *)result; result = (PyDateTime_DateTime *) - _PyObject_CallMethodIdOneArg(tzinfo, &PyId_fromutc, temp); + PyObject_CallMethodOneArg(tzinfo, &_Py_ID(fromutc), temp); Py_DECREF(temp); return result; @@ -6451,8 +6410,7 @@ datetime_utctimetuple(PyDateTime_DateTime *self, PyObject *Py_UNUSED(ignored)) tzinfo = GET_DT_TZINFO(self); if (tzinfo == Py_None) { - utcself = self; - Py_INCREF(utcself); + utcself = (PyDateTime_DateTime*)Py_NewRef(self); } else { PyObject *offset; @@ -6461,8 +6419,7 @@ datetime_utctimetuple(PyDateTime_DateTime *self, PyObject *Py_UNUSED(ignored)) return NULL; if (offset == Py_None) { Py_DECREF(offset); - utcself = self; - Py_INCREF(utcself); + utcself = (PyDateTime_DateTime*)Py_NewRef(self); } else { utcself = (PyDateTime_DateTime *)add_datetime_timedelta(self, diff --git a/Modules/_dbmmodule.c b/Modules/_dbmmodule.c index 9c83e380afd0f3..54376022dcb182 100644 --- a/Modules/_dbmmodule.c +++ b/Modules/_dbmmodule.c @@ -358,8 +358,7 @@ _dbm_dbm_get_impl(dbmobject *self, PyTypeObject *cls, const char *key, return PyBytes_FromStringAndSize(val.dptr, val.dsize); } - Py_INCREF(default_value); - return default_value; + return Py_NewRef(default_value); } /*[clinic input] @@ -419,8 +418,7 @@ _dbm_dbm_setdefault_impl(dbmobject *self, PyTypeObject *cls, const char *key, static PyObject * dbm__enter__(PyObject *self, PyObject *args) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * diff --git a/Modules/_decimal/_decimal.c b/Modules/_decimal/_decimal.c index 548bd601b0eb87..bc97615ffb4b72 100644 --- a/Modules/_decimal/_decimal.c +++ b/Modules/_decimal/_decimal.c @@ -116,15 +116,13 @@ static PyTypeObject PyDecContextManager_Type; Py_LOCAL_INLINE(PyObject *) incr_true(void) { - Py_INCREF(Py_True); - return Py_True; + return Py_NewRef(Py_True); } Py_LOCAL_INLINE(PyObject *) incr_false(void) { - Py_INCREF(Py_False); - return Py_False; + return Py_NewRef(Py_False); } @@ -655,8 +653,7 @@ signaldict_richcompare(PyObject *v, PyObject *w, int op) } } - Py_INCREF(res); - return res; + return Py_NewRef(res); } static PyObject * @@ -754,8 +751,7 @@ context_getround(PyObject *self, void *closure UNUSED) { int i = mpd_getround(CTX(self)); - Py_INCREF(round_map[i]); - return round_map[i]; + return Py_NewRef(round_map[i]); } static PyObject * @@ -1122,13 +1118,11 @@ context_getattr(PyObject *self, PyObject *name) if (PyUnicode_Check(name)) { if (PyUnicode_CompareWithASCIIString(name, "traps") == 0) { retval = ((PyDecContextObject *)self)->traps; - Py_INCREF(retval); - return retval; + return Py_NewRef(retval); } if (PyUnicode_CompareWithASCIIString(name, "flags") == 0) { retval = ((PyDecContextObject *)self)->flags; - Py_INCREF(retval); - return retval; + return Py_NewRef(retval); } } @@ -1602,8 +1596,7 @@ PyDec_GetCurrentContext(PyObject *self UNUSED, PyObject *args UNUSED) return NULL; } - Py_INCREF(context); - return context; + return Py_NewRef(context); } /* Set the thread local context to a new context, decrement old reference */ @@ -1778,8 +1771,7 @@ ctxmanager_new(PyTypeObject *type UNUSED, PyObject *args, PyObject *kwds) Py_DECREF(self); return NULL; } - self->global = global; - Py_INCREF(self->global); + self->global = 
Py_NewRef(global); int ret = context_setattrs( self->local, prec, rounding, @@ -1814,8 +1806,7 @@ ctxmanager_set_local(PyDecContextManagerObject *self, PyObject *args UNUSED) } Py_DECREF(ret); - Py_INCREF(self->local); - return self->local; + return Py_NewRef(self->local); } static PyObject * @@ -2418,8 +2409,7 @@ PyDecType_FromDecimalExact(PyTypeObject *type, PyObject *v, PyObject *context) uint32_t status = 0; if (type == &PyDec_Type && PyDec_CheckExact(v)) { - Py_INCREF(v); - return v; + return Py_NewRef(v); } dec = PyDecType_New(type); @@ -2440,8 +2430,7 @@ static PyObject * sequence_as_tuple(PyObject *v, PyObject *ex, const char *mesg) { if (PyTuple_Check(v)) { - Py_INCREF(v); - return v; + return Py_NewRef(v); } if (PyList_Check(v)) { return PyList_AsTuple(v); @@ -2863,8 +2852,7 @@ convert_op(int type_err, PyObject **conv, PyObject *v, PyObject *context) { if (PyDec_Check(v)) { - *conv = v; - Py_INCREF(v); + *conv = Py_NewRef(v); return 1; } if (PyLong_Check(v)) { @@ -2881,8 +2869,7 @@ convert_op(int type_err, PyObject **conv, PyObject *v, PyObject *context) Py_TYPE(v)->tp_name); } else { - Py_INCREF(Py_NotImplemented); - *conv = Py_NotImplemented; + *conv = Py_NewRef(Py_NotImplemented); } return 0; } @@ -3041,8 +3028,7 @@ convert_op_cmp(PyObject **vcmp, PyObject **wcmp, PyObject *v, PyObject *w, *vcmp = v; if (PyDec_Check(w)) { - Py_INCREF(w); - *wcmp = w; + *wcmp = Py_NewRef(w); } else if (PyLong_Check(w)) { *wcmp = PyDec_FromLongExact(w, context); @@ -3074,8 +3060,7 @@ convert_op_cmp(PyObject **vcmp, PyObject **wcmp, PyObject *v, PyObject *w, } } else { - Py_INCREF(Py_NotImplemented); - *wcmp = Py_NotImplemented; + *wcmp = Py_NewRef(Py_NotImplemented); } } else { @@ -3093,8 +3078,7 @@ convert_op_cmp(PyObject **vcmp, PyObject **wcmp, PyObject *v, PyObject *w, } } else { - Py_INCREF(Py_NotImplemented); - *wcmp = Py_NotImplemented; + *wcmp = Py_NewRef(Py_NotImplemented); } } @@ -4329,15 +4313,13 @@ dec_mpd_adjexp(PyObject *self, PyObject *dummy UNUSED) static PyObject * dec_canonical(PyObject *self, PyObject *dummy UNUSED) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * dec_conjugate(PyObject *self, PyObject *dummy UNUSED) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * @@ -4654,8 +4636,7 @@ dec_complex(PyObject *self, PyObject *dummy UNUSED) static PyObject * dec_copy(PyObject *self, PyObject *dummy UNUSED) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } /* __floor__ */ @@ -4815,13 +4796,11 @@ dec_reduce(PyObject *self, PyObject *dummy UNUSED) static PyObject * dec_sizeof(PyObject *v, PyObject *dummy UNUSED) { - Py_ssize_t res; - - res = _PyObject_SIZE(Py_TYPE(v)); + size_t res = _PyObject_SIZE(Py_TYPE(v)); if (mpd_isdynamic_data(MPD(v))) { - res += MPD(v)->alloc * sizeof(mpd_uint_t); + res += (size_t)MPD(v)->alloc * sizeof(mpd_uint_t); } - return PyLong_FromSsize_t(res); + return PyLong_FromSize_t(res); } /* __trunc__ */ @@ -4838,8 +4817,7 @@ dec_trunc(PyObject *self, PyObject *dummy UNUSED) static PyObject * dec_real(PyObject *self, void *closure UNUSED) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * @@ -5384,8 +5362,7 @@ ctx_canonical(PyObject *context UNUSED, PyObject *v) return NULL; } - Py_INCREF(v); - return v; + return Py_NewRef(v); } static PyObject * @@ -5916,23 +5893,17 @@ PyInit__decimal(void) /* Create the module */ ASSIGN_PTR(m, PyModule_Create(&_decimal_module)); - /* Add types to the module */ - Py_INCREF(&PyDec_Type); - 
CHECK_INT(PyModule_AddObject(m, "Decimal", (PyObject *)&PyDec_Type)); - Py_INCREF(&PyDecContext_Type); + CHECK_INT(PyModule_AddObject(m, "Decimal", Py_NewRef(&PyDec_Type))); CHECK_INT(PyModule_AddObject(m, "Context", - (PyObject *)&PyDecContext_Type)); - Py_INCREF(DecimalTuple); - CHECK_INT(PyModule_AddObject(m, "DecimalTuple", (PyObject *)DecimalTuple)); - + Py_NewRef(&PyDecContext_Type))); + CHECK_INT(PyModule_AddObject(m, "DecimalTuple", Py_NewRef(DecimalTuple))); /* Create top level exception */ ASSIGN_PTR(DecimalException, PyErr_NewException( "decimal.DecimalException", PyExc_ArithmeticError, NULL)); - Py_INCREF(DecimalException); - CHECK_INT(PyModule_AddObject(m, "DecimalException", DecimalException)); + CHECK_INT(PyModule_AddObject(m, "DecimalException", Py_NewRef(DecimalException))); /* Create signal tuple */ ASSIGN_PTR(SignalTuple, PyTuple_New(SIGNAL_MAP_LEN)); @@ -5972,12 +5943,10 @@ PyInit__decimal(void) Py_DECREF(base); /* add to module */ - Py_INCREF(cm->ex); - CHECK_INT(PyModule_AddObject(m, cm->name, cm->ex)); + CHECK_INT(PyModule_AddObject(m, cm->name, Py_NewRef(cm->ex))); /* add to signal tuple */ - Py_INCREF(cm->ex); - PyTuple_SET_ITEM(SignalTuple, i, cm->ex); + PyTuple_SET_ITEM(SignalTuple, i, Py_NewRef(cm->ex)); } /* @@ -6003,45 +5972,38 @@ PyInit__decimal(void) ASSIGN_PTR(cm->ex, PyErr_NewException(cm->fqname, base, NULL)); Py_DECREF(base); - Py_INCREF(cm->ex); - CHECK_INT(PyModule_AddObject(m, cm->name, cm->ex)); + CHECK_INT(PyModule_AddObject(m, cm->name, Py_NewRef(cm->ex))); } /* Init default context template first */ ASSIGN_PTR(default_context_template, PyObject_CallObject((PyObject *)&PyDecContext_Type, NULL)); - Py_INCREF(default_context_template); CHECK_INT(PyModule_AddObject(m, "DefaultContext", - default_context_template)); + Py_NewRef(default_context_template))); #ifndef WITH_DECIMAL_CONTEXTVAR ASSIGN_PTR(tls_context_key, PyUnicode_FromString("___DECIMAL_CTX__")); - Py_INCREF(Py_False); - CHECK_INT(PyModule_AddObject(m, "HAVE_CONTEXTVAR", Py_False)); + CHECK_INT(PyModule_AddObject(m, "HAVE_CONTEXTVAR", Py_NewRef(Py_False))); #else ASSIGN_PTR(current_context_var, PyContextVar_New("decimal_context", NULL)); - Py_INCREF(Py_True); - CHECK_INT(PyModule_AddObject(m, "HAVE_CONTEXTVAR", Py_True)); + CHECK_INT(PyModule_AddObject(m, "HAVE_CONTEXTVAR", Py_NewRef(Py_True))); #endif - Py_INCREF(Py_True); - CHECK_INT(PyModule_AddObject(m, "HAVE_THREADS", Py_True)); + CHECK_INT(PyModule_AddObject(m, "HAVE_THREADS", Py_NewRef(Py_True))); /* Init basic context template */ ASSIGN_PTR(basic_context_template, PyObject_CallObject((PyObject *)&PyDecContext_Type, NULL)); init_basic_context(basic_context_template); - Py_INCREF(basic_context_template); CHECK_INT(PyModule_AddObject(m, "BasicContext", - basic_context_template)); + Py_NewRef(basic_context_template))); /* Init extended context template */ ASSIGN_PTR(extended_context_template, PyObject_CallObject((PyObject *)&PyDecContext_Type, NULL)); init_extended_context(extended_context_template); - Py_INCREF(extended_context_template); CHECK_INT(PyModule_AddObject(m, "ExtendedContext", - extended_context_template)); + Py_NewRef(extended_context_template))); /* Init mpd_ssize_t constants */ @@ -6060,8 +6022,7 @@ PyInit__decimal(void) /* Init string constants */ for (i = 0; i < _PY_DEC_ROUND_GUARD; i++) { ASSIGN_PTR(round_map[i], PyUnicode_InternFromString(mpd_round_string[i])); - Py_INCREF(round_map[i]); - CHECK_INT(PyModule_AddObject(m, mpd_round_string[i], round_map[i])); + CHECK_INT(PyModule_AddObject(m, mpd_round_string[i], 
Py_NewRef(round_map[i]))); } /* Add specification version number */ diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c index 87f18d6e671a63..555c22f88b36d5 100644 --- a/Modules/_elementtree.c +++ b/Modules/_elementtree.c @@ -12,7 +12,6 @@ */ #define PY_SSIZE_T_CLEAN -#define NEEDS_PY_IDENTIFIER #include "Python.h" #include "structmember.h" // PyMemberDef @@ -84,6 +83,15 @@ typedef struct { PyObject *elementpath_obj; PyObject *comment_factory; PyObject *pi_factory; + /* Interned strings */ + PyObject *str_text; + PyObject *str_tail; + PyObject *str_append; + PyObject *str_find; + PyObject *str_findtext; + PyObject *str_findall; + PyObject *str_iterfind; + PyObject *str_doctype; } elementtreestate; static struct PyModuleDef elementtreemodule; @@ -221,8 +229,7 @@ create_extra(ElementObject* self, PyObject* attrib) return -1; } - Py_XINCREF(attrib); - self->extra->attrib = attrib; + self->extra->attrib = Py_XNewRef(attrib); self->extra->length = 0; self->extra->allocated = STATIC_CHILDREN; @@ -278,16 +285,9 @@ create_new_element(PyObject* tag, PyObject* attrib) if (self == NULL) return NULL; self->extra = NULL; - - Py_INCREF(tag); - self->tag = tag; - - Py_INCREF(Py_None); - self->text = Py_None; - - Py_INCREF(Py_None); - self->tail = Py_None; - + self->tag = Py_NewRef(tag); + self->text = Py_NewRef(Py_None); + self->tail = Py_NewRef(Py_None); self->weakreflist = NULL; PyObject_GC_Track(self); @@ -307,15 +307,9 @@ element_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { ElementObject *e = (ElementObject *)type->tp_alloc(type, 0); if (e != NULL) { - Py_INCREF(Py_None); - e->tag = Py_None; - - Py_INCREF(Py_None); - e->text = Py_None; - - Py_INCREF(Py_None); - e->tail = Py_None; - + e->tag = Py_NewRef(Py_None); + e->text = Py_NewRef(Py_None); + e->tail = Py_NewRef(Py_None); e->extra = NULL; e->weakreflist = NULL; } @@ -351,8 +345,7 @@ get_attrib_from_keywords(PyObject *kwds) } attrib = PyDict_Copy(attrib); if (attrib && PyDict_DelItem(kwds, attrib_str) < 0) { - Py_DECREF(attrib); - attrib = NULL; + Py_SETREF(attrib, NULL); } } else if (!PyErr_Occurred()) { @@ -417,14 +410,10 @@ element_init(PyObject *self, PyObject *args, PyObject *kwds) Py_XDECREF(attrib); /* Replace the objects already pointed to by tag, text and tail. 
*/ - Py_INCREF(tag); - Py_XSETREF(self_elem->tag, tag); - - Py_INCREF(Py_None); - _set_joined_ptr(&self_elem->text, Py_None); + Py_XSETREF(self_elem->tag, Py_NewRef(tag)); - Py_INCREF(Py_None); - _set_joined_ptr(&self_elem->tail, Py_None); + _set_joined_ptr(&self_elem->text, Py_NewRef(Py_None)); + _set_joined_ptr(&self_elem->tail, Py_NewRef(Py_None)); return 0; } @@ -506,8 +495,7 @@ element_add_subelement(ElementObject* self, PyObject* element) if (element_resize(self, 1) < 0) return -1; - Py_INCREF(element); - self->extra->children[self->extra->length] = element; + self->extra->children[self->extra->length] = Py_NewRef(element); self->extra->length++; @@ -544,8 +532,7 @@ element_get_text(ElementObject* self) if (!tmp) return NULL; self->text = tmp; - Py_DECREF(res); - res = tmp; + Py_SETREF(res, tmp); } } @@ -566,8 +553,7 @@ element_get_tail(ElementObject* self) if (!tmp) return NULL; self->tail = tmp; - Py_DECREF(res); - res = tmp; + Py_SETREF(res, tmp); } } @@ -699,11 +685,8 @@ _elementtree_Element_clear_impl(ElementObject *self) { clear_extra(self); - Py_INCREF(Py_None); - _set_joined_ptr(&self->text, Py_None); - - Py_INCREF(Py_None); - _set_joined_ptr(&self->tail, Py_None); + _set_joined_ptr(&self->text, Py_NewRef(Py_None)); + _set_joined_ptr(&self->tail, Py_NewRef(Py_None)); Py_RETURN_NONE; } @@ -739,8 +722,7 @@ _elementtree_Element___copy___impl(ElementObject *self) } for (i = 0; i < self->extra->length; i++) { - Py_INCREF(self->extra->children[i]); - element->extra->children[i] = self->extra->children[i]; + element->extra->children[i] = Py_NewRef(self->extra->children[i]); } assert(!element->extra->length); @@ -855,8 +837,7 @@ deepcopy(PyObject *object, PyObject *memo) /* Fast paths */ if (object == Py_None || PyUnicode_CheckExact(object)) { - Py_INCREF(object); - return object; + return Py_NewRef(object); } if (Py_REFCNT(object) == 1) { @@ -895,19 +876,20 @@ deepcopy(PyObject *object, PyObject *memo) /*[clinic input] -_elementtree.Element.__sizeof__ -> Py_ssize_t +_elementtree.Element.__sizeof__ -> size_t [clinic start generated code]*/ -static Py_ssize_t +static size_t _elementtree_Element___sizeof___impl(ElementObject *self) -/*[clinic end generated code: output=bf73867721008000 input=70f4b323d55a17c1]*/ +/*[clinic end generated code: output=baae4e7ae9fe04ec input=54e298c501f3e0d0]*/ { - Py_ssize_t result = _PyObject_SIZE(Py_TYPE(self)); + size_t result = _PyObject_SIZE(Py_TYPE(self)); if (self->extra) { result += sizeof(ElementObjectExtra); - if (self->extra->children != self->extra->_children) - result += sizeof(PyObject*) * self->extra->allocated; + if (self->extra->children != self->extra->_children) { + result += (size_t)self->extra->allocated * sizeof(PyObject*); + } } return result; } @@ -942,14 +924,12 @@ _elementtree_Element___getstate___impl(ElementObject *self) if (!children) return NULL; for (i = 0; i < PyList_GET_SIZE(children); i++) { - PyObject *child = self->extra->children[i]; - Py_INCREF(child); + PyObject *child = Py_NewRef(self->extra->children[i]); PyList_SET_ITEM(children, i, child); } if (self->extra && self->extra->attrib) { - attrib = self->extra->attrib; - Py_INCREF(attrib); + attrib = Py_NewRef(self->extra->attrib); } else { attrib = PyDict_New(); @@ -983,8 +963,7 @@ element_setstate_from_attributes(ElementObject *self, return NULL; } - Py_INCREF(tag); - Py_XSETREF(self->tag, tag); + Py_XSETREF(self->tag, Py_NewRef(tag)); text = text ? 
JOIN_SET(text, PyList_CheckExact(text)) : Py_None; Py_INCREF(JOIN_OBJ(text)); @@ -1035,8 +1014,7 @@ element_setstate_from_attributes(ElementObject *self, dealloc_extra(oldextra); return NULL; } - Py_INCREF(child); - self->extra->children[i] = child; + self->extra->children[i] = Py_NewRef(child); } assert(!self->extra->length); @@ -1049,8 +1027,7 @@ element_setstate_from_attributes(ElementObject *self, } /* Stash attrib. */ - Py_XINCREF(attrib); - Py_XSETREF(self->extra->attrib, attrib); + Py_XSETREF(self->extra->attrib, Py_XNewRef(attrib)); dealloc_extra(oldextra); Py_RETURN_NONE; @@ -1187,8 +1164,7 @@ _elementtree_Element_extend(ElementObject *self, PyObject *elements) } for (i = 0; i < PySequence_Fast_GET_SIZE(seq); i++) { - PyObject* element = PySequence_Fast_GET_ITEM(seq, i); - Py_INCREF(element); + PyObject* element = Py_NewRef(PySequence_Fast_GET_ITEM(seq, i)); if (element_add_subelement(self, element) < 0) { Py_DECREF(seq); Py_DECREF(element); @@ -1219,9 +1195,8 @@ _elementtree_Element_find_impl(ElementObject *self, PyObject *path, elementtreestate *st = ET_STATE_GLOBAL; if (checkpath(path) || namespaces != Py_None) { - _Py_IDENTIFIER(find); - return _PyObject_CallMethodIdObjArgs( - st->elementpath_obj, &PyId_find, self, path, namespaces, NULL + return PyObject_CallMethodObjArgs( + st->elementpath_obj, st->str_find, self, path, namespaces, NULL ); } @@ -1260,18 +1235,16 @@ _elementtree_Element_findtext_impl(ElementObject *self, PyObject *path, /*[clinic end generated code: output=83b3ba4535d308d2 input=b53a85aa5aa2a916]*/ { Py_ssize_t i; - _Py_IDENTIFIER(findtext); elementtreestate *st = ET_STATE_GLOBAL; if (checkpath(path) || namespaces != Py_None) - return _PyObject_CallMethodIdObjArgs( - st->elementpath_obj, &PyId_findtext, + return PyObject_CallMethodObjArgs( + st->elementpath_obj, st->str_findtext, self, path, default_value, namespaces, NULL ); if (!self->extra) { - Py_INCREF(default_value); - return default_value; + return Py_NewRef(default_value); } for (i = 0; i < self->extra->length; i++) { @@ -1295,8 +1268,7 @@ _elementtree_Element_findtext_impl(ElementObject *self, PyObject *path, return NULL; } - Py_INCREF(default_value); - return default_value; + return Py_NewRef(default_value); } /*[clinic input] @@ -1317,9 +1289,8 @@ _elementtree_Element_findall_impl(ElementObject *self, PyObject *path, elementtreestate *st = ET_STATE_GLOBAL; if (checkpath(path) || namespaces != Py_None) { - _Py_IDENTIFIER(findall); - return _PyObject_CallMethodIdObjArgs( - st->elementpath_obj, &PyId_findall, self, path, namespaces, NULL + return PyObject_CallMethodObjArgs( + st->elementpath_obj, st->str_findall, self, path, namespaces, NULL ); } @@ -1361,11 +1332,10 @@ _elementtree_Element_iterfind_impl(ElementObject *self, PyObject *path, /*[clinic end generated code: output=ecdd56d63b19d40f input=abb974e350fb65c7]*/ { PyObject* tag = path; - _Py_IDENTIFIER(iterfind); elementtreestate *st = ET_STATE_GLOBAL; - return _PyObject_CallMethodIdObjArgs( - st->elementpath_obj, &PyId_iterfind, self, tag, namespaces, NULL); + return PyObject_CallMethodObjArgs( + st->elementpath_obj, st->str_iterfind, self, tag, namespaces, NULL); } /*[clinic input] @@ -1382,18 +1352,15 @@ _elementtree_Element_get_impl(ElementObject *self, PyObject *key, /*[clinic end generated code: output=523c614142595d75 input=ee153bbf8cdb246e]*/ { if (self->extra && self->extra->attrib) { - PyObject *attrib = self->extra->attrib; - Py_INCREF(attrib); - PyObject *value = PyDict_GetItemWithError(attrib, key); - Py_XINCREF(value); + PyObject 
*attrib = Py_NewRef(self->extra->attrib); + PyObject *value = Py_XNewRef(PyDict_GetItemWithError(attrib, key)); Py_DECREF(attrib); if (value != NULL || PyErr_Occurred()) { return value; } } - Py_INCREF(default_value); - return default_value; + return Py_NewRef(default_value); } static PyObject * @@ -1452,8 +1419,7 @@ element_getitem(PyObject* self_, Py_ssize_t index) return NULL; } - Py_INCREF(self->extra->children[index]); - return self->extra->children[index]; + return Py_NewRef(self->extra->children[index]); } /*[clinic input] @@ -1491,8 +1457,7 @@ _elementtree_Element_insert_impl(ElementObject *self, Py_ssize_t index, for (i = self->extra->length; i > index; i--) self->extra->children[i] = self->extra->children[i-1]; - Py_INCREF(subelement); - self->extra->children[index] = subelement; + self->extra->children[index] = Py_NewRef(subelement); self->extra->length++; @@ -1693,8 +1658,7 @@ element_setitem(PyObject* self_, Py_ssize_t index, PyObject* item) raise_type_error(item); return -1; } - Py_INCREF(item); - self->extra->children[index] = item; + self->extra->children[index] = Py_NewRef(item); } else { self->extra->length--; for (i = index; i < self->extra->length; i++) @@ -1744,8 +1708,7 @@ element_subscr(PyObject* self_, PyObject* item) for (cur = start, i = 0; i < slicelen; cur += step, i++) { - PyObject* item = self->extra->children[cur]; - Py_INCREF(item); + PyObject* item = Py_NewRef(self->extra->children[cur]); PyList_SET_ITEM(list, i, item); } @@ -1925,8 +1888,7 @@ element_ass_subscr(PyObject* self_, PyObject* item, PyObject* value) for (cur = start, i = 0; i < newlen; cur += step, i++) { PyObject* element = PySequence_Fast_GET_ITEM(seq, i); - Py_INCREF(element); - self->extra->children[cur] = element; + self->extra->children[cur] = Py_NewRef(element); } self->extra->length += newlen - slicelen; @@ -1949,24 +1911,21 @@ static PyObject* element_tag_getter(ElementObject *self, void *closure) { PyObject *res = self->tag; - Py_INCREF(res); - return res; + return Py_NewRef(res); } static PyObject* element_text_getter(ElementObject *self, void *closure) { PyObject *res = element_get_text(self); - Py_XINCREF(res); - return res; + return Py_XNewRef(res); } static PyObject* element_tail_getter(ElementObject *self, void *closure) { PyObject *res = element_get_tail(self); - Py_XINCREF(res); - return res; + return Py_XNewRef(res); } static PyObject* @@ -1978,8 +1937,7 @@ element_attrib_getter(ElementObject *self, void *closure) return NULL; } res = element_get_attrib(self); - Py_XINCREF(res); - return res; + return Py_XNewRef(res); } /* macro for setter validation */ @@ -1995,8 +1953,7 @@ static int element_tag_setter(ElementObject *self, PyObject *value, void *closure) { _VALIDATE_ATTR_VALUE(value); - Py_INCREF(value); - Py_SETREF(self->tag, value); + Py_SETREF(self->tag, Py_NewRef(value)); return 0; } @@ -2004,8 +1961,7 @@ static int element_text_setter(ElementObject *self, PyObject *value, void *closure) { _VALIDATE_ATTR_VALUE(value); - Py_INCREF(value); - _set_joined_ptr(&self->text, value); + _set_joined_ptr(&self->text, Py_NewRef(value)); return 0; } @@ -2013,8 +1969,7 @@ static int element_tail_setter(ElementObject *self, PyObject *value, void *closure) { _VALIDATE_ATTR_VALUE(value); - Py_INCREF(value); - _set_joined_ptr(&self->tail, value); + _set_joined_ptr(&self->tail, Py_NewRef(value)); return 0; } @@ -2032,8 +1987,7 @@ element_attrib_setter(ElementObject *self, PyObject *value, void *closure) if (create_extra(self, NULL) < 0) return -1; } - Py_INCREF(value); - 
Py_XSETREF(self->extra->attrib, value); + Py_XSETREF(self->extra->attrib, Py_NewRef(value)); return 0; } @@ -2119,8 +2073,7 @@ parent_stack_push_new(ElementIterObject *it, ElementObject *parent) it->parent_stack_size = new_size; } item = it->parent_stack + it->parent_stack_used++; - Py_INCREF(parent); - item->parent = parent; + item->parent = (ElementObject*)Py_NewRef(parent); item->child_index = 0; return 0; } @@ -2181,9 +2134,8 @@ elementiter_next(ElementIterObject *it) } assert(Element_Check(extra->children[child_index])); - elem = (ElementObject *)extra->children[child_index]; + elem = (ElementObject *)Py_NewRef(extra->children[child_index]); item->child_index++; - Py_INCREF(elem); } if (parent_stack_push_new(it, elem) < 0) { @@ -2287,11 +2239,9 @@ create_elementiter(ElementObject *self, PyObject *tag, int gettext) if (!it) return NULL; - Py_INCREF(tag); - it->sought_tag = tag; + it->sought_tag = Py_NewRef(tag); it->gettext = gettext; - Py_INCREF(self); - it->root_element = self; + it->root_element = (ElementObject*)Py_NewRef(self); it->parent_stack = PyMem_New(ParentLocator, INIT_PARENT_STACK_SIZE); if (it->parent_stack == NULL) { @@ -2353,12 +2303,8 @@ treebuilder_new(PyTypeObject *type, PyObject *args, PyObject *kwds) TreeBuilderObject *t = (TreeBuilderObject *)type->tp_alloc(type, 0); if (t != NULL) { t->root = NULL; - - Py_INCREF(Py_None); - t->this = Py_None; - Py_INCREF(Py_None); - t->last = Py_None; - + t->this = Py_NewRef(Py_None); + t->last = Py_NewRef(Py_None); t->data = NULL; t->element_factory = NULL; t->comment_factory = NULL; @@ -2402,8 +2348,7 @@ _elementtree_TreeBuilder___init___impl(TreeBuilderObject *self, /*[clinic end generated code: output=8571d4dcadfdf952 input=ae98a94df20b5cc3]*/ { if (element_factory != Py_None) { - Py_INCREF(element_factory); - Py_XSETREF(self->element_factory, element_factory); + Py_XSETREF(self->element_factory, Py_NewRef(element_factory)); } else { Py_CLEAR(self->element_factory); } @@ -2413,8 +2358,7 @@ _elementtree_TreeBuilder___init___impl(TreeBuilderObject *self, comment_factory = st->comment_factory; } if (comment_factory) { - Py_INCREF(comment_factory); - Py_XSETREF(self->comment_factory, comment_factory); + Py_XSETREF(self->comment_factory, Py_NewRef(comment_factory)); self->insert_comments = insert_comments; } else { Py_CLEAR(self->comment_factory); @@ -2426,8 +2370,7 @@ _elementtree_TreeBuilder___init___impl(TreeBuilderObject *self, pi_factory = st->pi_factory; } if (pi_factory) { - Py_INCREF(pi_factory); - Py_XSETREF(self->pi_factory, pi_factory); + Py_XSETREF(self->pi_factory, Py_NewRef(pi_factory)); self->insert_pis = insert_pis; } else { Py_CLEAR(self->pi_factory); @@ -2530,14 +2473,12 @@ _elementtree__set_factories_impl(PyObject *module, PyObject *comment_factory, if (comment_factory == Py_None) { Py_CLEAR(st->comment_factory); } else { - Py_INCREF(comment_factory); - Py_XSETREF(st->comment_factory, comment_factory); + Py_XSETREF(st->comment_factory, Py_NewRef(comment_factory)); } if (pi_factory == Py_None) { Py_CLEAR(st->pi_factory); } else { - Py_INCREF(pi_factory); - Py_XSETREF(st->pi_factory, pi_factory); + Py_XSETREF(st->pi_factory, Py_NewRef(pi_factory)); } return old; @@ -2545,7 +2486,7 @@ _elementtree__set_factories_impl(PyObject *module, PyObject *comment_factory, static int treebuilder_extend_element_text_or_tail(PyObject *element, PyObject **data, - PyObject **dest, _Py_Identifier *name) + PyObject **dest, PyObject *name) { /* Fast paths for the "almost always" cases. 
*/ if (Element_CheckExact(element)) { @@ -2569,7 +2510,7 @@ treebuilder_extend_element_text_or_tail(PyObject *element, PyObject **data, { int r; PyObject* joined; - PyObject* previous = _PyObject_GetAttrId(element, name); + PyObject* previous = PyObject_GetAttr(element, name); if (!previous) return -1; joined = list_join(*data); @@ -2588,7 +2529,7 @@ treebuilder_extend_element_text_or_tail(PyObject *element, PyObject **data, Py_DECREF(previous); } - r = _PyObject_SetAttrId(element, name, joined); + r = PyObject_SetAttr(element, name, joined); Py_DECREF(joined); if (r < 0) return -1; @@ -2603,34 +2544,32 @@ treebuilder_flush_data(TreeBuilderObject* self) if (!self->data) { return 0; } - + elementtreestate *st = ET_STATE_GLOBAL; if (!self->last_for_tail) { PyObject *element = self->last; - _Py_IDENTIFIER(text); return treebuilder_extend_element_text_or_tail( element, &self->data, - &((ElementObject *) element)->text, &PyId_text); + &((ElementObject *) element)->text, st->str_text); } else { PyObject *element = self->last_for_tail; - _Py_IDENTIFIER(tail); return treebuilder_extend_element_text_or_tail( element, &self->data, - &((ElementObject *) element)->tail, &PyId_tail); + &((ElementObject *) element)->tail, st->str_tail); } } static int treebuilder_add_subelement(PyObject *element, PyObject *child) { - _Py_IDENTIFIER(append); + elementtreestate *st = ET_STATE_GLOBAL; if (Element_CheckExact(element)) { ElementObject *elem = (ElementObject *) element; return element_add_subelement(elem, child); } else { PyObject *res; - res = _PyObject_CallMethodIdOneArg(element, &PyId_append, child); + res = PyObject_CallMethodOneArg(element, st->str_append, child); if (res == NULL) return -1; Py_DECREF(res); @@ -2703,8 +2642,7 @@ treebuilder_handle_start(TreeBuilderObject* self, PyObject* tag, ); goto error; } - Py_INCREF(node); - self->root = node; + self->root = Py_NewRef(node); } if (self->index < PyList_GET_SIZE(self->stack)) { @@ -2717,10 +2655,8 @@ treebuilder_handle_start(TreeBuilderObject* self, PyObject* tag, } self->index++; - Py_INCREF(node); - Py_SETREF(self->this, node); - Py_INCREF(node); - Py_SETREF(self->last, node); + Py_SETREF(self->this, Py_NewRef(node)); + Py_SETREF(self->last, Py_NewRef(node)); if (treebuilder_append_event(self, self->start_event_obj, node) < 0) goto error; @@ -2741,7 +2677,7 @@ treebuilder_handle_data(TreeBuilderObject* self, PyObject* data) Py_RETURN_NONE; } /* store the first item as is */ - Py_INCREF(data); self->data = data; + self->data = Py_NewRef(data); } else { /* more than one item; use a list to collect items */ if (PyBytes_CheckExact(self->data) && Py_REFCNT(self->data) == 1 && @@ -2760,9 +2696,9 @@ treebuilder_handle_data(TreeBuilderObject* self, PyObject* data) PyObject* list = PyList_New(2); if (!list) return NULL; - PyList_SET_ITEM(list, 0, self->data); - Py_INCREF(data); PyList_SET_ITEM(list, 1, data); - self->data = list; + PyList_SET_ITEM(list, 0, Py_NewRef(self->data)); + PyList_SET_ITEM(list, 1, Py_NewRef(data)); + Py_SETREF(self->data, list); } } @@ -2787,19 +2723,16 @@ treebuilder_handle_end(TreeBuilderObject* self, PyObject* tag) } item = self->last; - self->last = self->this; - Py_INCREF(self->last); + self->last = Py_NewRef(self->this); Py_XSETREF(self->last_for_tail, self->last); self->index--; - self->this = PyList_GET_ITEM(self->stack, self->index); - Py_INCREF(self->this); + self->this = Py_NewRef(PyList_GET_ITEM(self->stack, self->index)); Py_DECREF(item); if (treebuilder_append_event(self, self->end_event_obj, self->last) < 0) return 
NULL; - Py_INCREF(self->last); - return (PyObject*) self->last; + return Py_NewRef(self->last); } LOCAL(PyObject*) @@ -2821,12 +2754,10 @@ treebuilder_handle_comment(TreeBuilderObject* self, PyObject* text) if (self->insert_comments && this != Py_None) { if (treebuilder_add_subelement(this, comment) < 0) goto error; - Py_INCREF(comment); - Py_XSETREF(self->last_for_tail, comment); + Py_XSETREF(self->last_for_tail, Py_NewRef(comment)); } } else { - Py_INCREF(text); - comment = text; + comment = Py_NewRef(text); } if (self->events_append && self->comment_event_obj) { @@ -2862,8 +2793,7 @@ treebuilder_handle_pi(TreeBuilderObject* self, PyObject* target, PyObject* text) if (self->insert_pis && this != Py_None) { if (treebuilder_add_subelement(this, pi) < 0) goto error; - Py_INCREF(pi); - Py_XSETREF(self->last_for_tail, pi); + Py_XSETREF(self->last_for_tail, Py_NewRef(pi)); } } else { pi = PyTuple_Pack(2, target, text); @@ -2994,8 +2924,7 @@ treebuilder_done(TreeBuilderObject* self) else res = Py_None; - Py_INCREF(res); - return res; + return Py_NewRef(res); } /*[clinic input] @@ -3083,12 +3012,9 @@ makeuniversal(XMLParserObject* self, const char* string) if (!key) return NULL; - value = PyDict_GetItemWithError(self->names, key); + value = Py_XNewRef(PyDict_GetItemWithError(self->names, key)); - if (value) { - Py_INCREF(value); - } - else if (!PyErr_Occurred()) { + if (value == NULL && !PyErr_Occurred()) { /* new name. convert to universal name, and decode as necessary */ @@ -3113,8 +3039,7 @@ makeuniversal(XMLParserObject* self, const char* string) size++; } else { /* plain name; use key as tag */ - Py_INCREF(key); - tag = key; + tag = Py_NewRef(key); } /* decode universal name */ @@ -3486,7 +3411,6 @@ expat_start_doctype_handler(XMLParserObject *self, const XML_Char *pubid, int has_internal_subset) { - _Py_IDENTIFIER(doctype); PyObject *doctype_name_obj, *sysid_obj, *pubid_obj; PyObject *res; @@ -3504,8 +3428,7 @@ expat_start_doctype_handler(XMLParserObject *self, return; } } else { - Py_INCREF(Py_None); - sysid_obj = Py_None; + sysid_obj = Py_NewRef(Py_None); } if (pubid) { @@ -3516,10 +3439,10 @@ expat_start_doctype_handler(XMLParserObject *self, return; } } else { - Py_INCREF(Py_None); - pubid_obj = Py_None; + pubid_obj = Py_NewRef(Py_None); } + elementtreestate *st = ET_STATE_GLOBAL; /* If the target has a handler for doctype, call it. */ if (self->handle_doctype) { res = PyObject_CallFunctionObjArgs(self->handle_doctype, @@ -3527,7 +3450,7 @@ expat_start_doctype_handler(XMLParserObject *self, sysid_obj, NULL); Py_XDECREF(res); } - else if (_PyObject_LookupAttrId((PyObject *)self, &PyId_doctype, &res) > 0) { + else if (_PyObject_LookupAttr((PyObject *)self, st->str_doctype, &res) > 0) { (void)PyErr_WarnEx(PyExc_RuntimeWarning, "The doctype() method of XMLParser is ignored. 
" "Define doctype() method on the TreeBuilder target.", @@ -4077,39 +4000,37 @@ _elementtree_XMLParser__setevents_impl(XMLParserObject *self, return NULL; } - Py_INCREF(event_name_obj); if (strcmp(event_name, "start") == 0) { - Py_XSETREF(target->start_event_obj, event_name_obj); + Py_XSETREF(target->start_event_obj, Py_NewRef(event_name_obj)); } else if (strcmp(event_name, "end") == 0) { - Py_XSETREF(target->end_event_obj, event_name_obj); + Py_XSETREF(target->end_event_obj, Py_NewRef(event_name_obj)); } else if (strcmp(event_name, "start-ns") == 0) { - Py_XSETREF(target->start_ns_event_obj, event_name_obj); + Py_XSETREF(target->start_ns_event_obj, Py_NewRef(event_name_obj)); EXPAT(SetNamespaceDeclHandler)( self->parser, (XML_StartNamespaceDeclHandler) expat_start_ns_handler, (XML_EndNamespaceDeclHandler) expat_end_ns_handler ); } else if (strcmp(event_name, "end-ns") == 0) { - Py_XSETREF(target->end_ns_event_obj, event_name_obj); + Py_XSETREF(target->end_ns_event_obj, Py_NewRef(event_name_obj)); EXPAT(SetNamespaceDeclHandler)( self->parser, (XML_StartNamespaceDeclHandler) expat_start_ns_handler, (XML_EndNamespaceDeclHandler) expat_end_ns_handler ); } else if (strcmp(event_name, "comment") == 0) { - Py_XSETREF(target->comment_event_obj, event_name_obj); + Py_XSETREF(target->comment_event_obj, Py_NewRef(event_name_obj)); EXPAT(SetCommentHandler)( self->parser, (XML_CommentHandler) expat_comment_handler ); } else if (strcmp(event_name, "pi") == 0) { - Py_XSETREF(target->pi_event_obj, event_name_obj); + Py_XSETREF(target->pi_event_obj, Py_NewRef(event_name_obj)); EXPAT(SetProcessingInstructionHandler)( self->parser, (XML_ProcessingInstructionHandler) expat_pi_handler ); } else { - Py_DECREF(event_name_obj); Py_DECREF(events_seq); PyErr_Format(PyExc_ValueError, "unknown event '%s'", event_name); return NULL; @@ -4375,8 +4296,7 @@ PyInit__elementtree(void) m = PyState_FindModule(&elementtreemodule); if (m) { - Py_INCREF(m); - return m; + return Py_NewRef(m); } /* Initialize object types */ @@ -4420,12 +4340,42 @@ PyInit__elementtree(void) return NULL; } + st->str_append = PyUnicode_InternFromString("append"); + if (st->str_append == NULL) { + return NULL; + } + st->str_find = PyUnicode_InternFromString("find"); + if (st->str_find == NULL) { + return NULL; + } + st->str_findall = PyUnicode_InternFromString("findall"); + if (st->str_findall == NULL) { + return NULL; + } + st->str_findtext = PyUnicode_InternFromString("findtext"); + if (st->str_findtext == NULL) { + return NULL; + } + st->str_iterfind = PyUnicode_InternFromString("iterfind"); + if (st->str_iterfind == NULL) { + return NULL; + } + st->str_tail = PyUnicode_InternFromString("tail"); + if (st->str_tail == NULL) { + return NULL; + } + st->str_text = PyUnicode_InternFromString("text"); + if (st->str_text == NULL) { + return NULL; + } + st->str_doctype = PyUnicode_InternFromString("doctype"); + if (st->str_doctype == NULL) { + return NULL; + } st->parseerror_obj = PyErr_NewException( "xml.etree.ElementTree.ParseError", PyExc_SyntaxError, NULL ); - Py_INCREF(st->parseerror_obj); - if (PyModule_AddObject(m, "ParseError", st->parseerror_obj) < 0) { - Py_DECREF(st->parseerror_obj); + if (PyModule_AddObjectRef(m, "ParseError", st->parseerror_obj) < 0) { return NULL; } diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c index e4036e166921d9..4032ba79374fa4 100644 --- a/Modules/_functoolsmodule.c +++ b/Modules/_functoolsmodule.c @@ -1,5 +1,6 @@ #include "Python.h" #include "pycore_call.h" // _PyObject_CallNoArgs() +#include 
"pycore_dict.h" // _PyDict_Pop_KnownHash() #include "pycore_long.h" // _PyLong_GetZero() #include "pycore_moduleobject.h" // _PyModule_GetState() #include "pycore_object.h" // _PyObject_GC_TRACK @@ -105,8 +106,7 @@ partial_new(PyTypeObject *type, PyObject *args, PyObject *kw) if (pto == NULL) return NULL; - pto->fn = func; - Py_INCREF(func); + pto->fn = Py_NewRef(func); nargs = PyTuple_GetSlice(args, 1, PY_SSIZE_T_MAX); if (nargs == NULL) { @@ -131,8 +131,7 @@ partial_new(PyTypeObject *type, PyObject *args, PyObject *kw) pto->kw = PyDict_New(); } else if (Py_REFCNT(kw) == 1) { - Py_INCREF(kw); - pto->kw = kw; + pto->kw = Py_NewRef(kw); } else { pto->kw = PyDict_Copy(kw); @@ -302,8 +301,7 @@ partial_call(partialobject *pto, PyObject *args, PyObject *kwargs) PyObject *kwargs2; if (PyDict_GET_SIZE(pto->kw) == 0) { /* kwargs can be NULL */ - kwargs2 = kwargs; - Py_XINCREF(kwargs2); + kwargs2 = Py_XNewRef(kwargs); } else { /* bpo-27840, bpo-29318: dictionary of keyword parameters must be @@ -463,8 +461,7 @@ partial_setstate(partialobject *pto, PyObject *state) else Py_INCREF(dict); - Py_INCREF(fn); - Py_SETREF(pto->fn, fn); + Py_SETREF(pto->fn, Py_NewRef(fn)); Py_SETREF(pto->args, fnargs); Py_SETREF(pto->kw, kw); Py_XSETREF(pto->dict, dict); @@ -588,10 +585,8 @@ keyobject_call(keyobject *ko, PyObject *args, PyObject *kwds) if (result == NULL) { return NULL; } - Py_INCREF(ko->cmp); - result->cmp = ko->cmp; - Py_INCREF(object); - result->object = object; + result->cmp = Py_NewRef(ko->cmp); + result->object = Py_NewRef(object); PyObject_GC_Track(result); return (PyObject *)result; } @@ -654,8 +649,7 @@ _functools_cmp_to_key_impl(PyObject *module, PyObject *mycmp) object = PyObject_GC_New(keyobject, state->keyobject_type); if (!object) return NULL; - Py_INCREF(mycmp); - object->cmp = mycmp; + object->cmp = Py_NewRef(mycmp); object->object = NULL; PyObject_GC_Track(object); return (PyObject *)object; @@ -837,12 +831,10 @@ lru_cache_make_key(PyObject *kwd_mark, PyObject *args, if (PyUnicode_CheckExact(key) || PyLong_CheckExact(key)) { /* For common scalar keys, save space by dropping the enclosing args tuple */ - Py_INCREF(key); - return key; + return Py_NewRef(key); } } - Py_INCREF(args); - return args; + return Py_NewRef(args); } key_size = PyTuple_GET_SIZE(args); @@ -858,31 +850,25 @@ lru_cache_make_key(PyObject *kwd_mark, PyObject *args, key_pos = 0; for (pos = 0; pos < PyTuple_GET_SIZE(args); ++pos) { PyObject *item = PyTuple_GET_ITEM(args, pos); - Py_INCREF(item); - PyTuple_SET_ITEM(key, key_pos++, item); + PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(item)); } if (kwds_size) { - Py_INCREF(kwd_mark); - PyTuple_SET_ITEM(key, key_pos++, kwd_mark); + PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(kwd_mark)); for (pos = 0; PyDict_Next(kwds, &pos, &keyword, &value);) { - Py_INCREF(keyword); - PyTuple_SET_ITEM(key, key_pos++, keyword); - Py_INCREF(value); - PyTuple_SET_ITEM(key, key_pos++, value); + PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(keyword)); + PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(value)); } assert(key_pos == PyTuple_GET_SIZE(args) + kwds_size * 2 + 1); } if (typed) { for (pos = 0; pos < PyTuple_GET_SIZE(args); ++pos) { PyObject *item = (PyObject *)Py_TYPE(PyTuple_GET_ITEM(args, pos)); - Py_INCREF(item); - PyTuple_SET_ITEM(key, key_pos++, item); + PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(item)); } if (kwds_size) { for (pos = 0; PyDict_Next(kwds, &pos, &keyword, &value);) { PyObject *item = (PyObject *)Py_TYPE(value); - Py_INCREF(item); - PyTuple_SET_ITEM(key, key_pos++, item); + 
PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(item)); } } } @@ -1084,8 +1070,7 @@ bounded_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds return NULL; } lru_cache_append_link(self, link); - Py_INCREF(result); /* for return */ - return result; + return Py_NewRef(result); } /* Since the cache is full, we need to evict an old key and add a new key. Rather than free the old link and allocate a new @@ -1230,16 +1215,12 @@ lru_cache_new(PyTypeObject *type, PyObject *args, PyObject *kw) obj->wrapper = wrapper; obj->typed = typed; obj->cache = cachedict; - Py_INCREF(func); - obj->func = func; + obj->func = Py_NewRef(func); obj->misses = obj->hits = 0; obj->maxsize = maxsize; - Py_INCREF(state->kwd_mark); - obj->kwd_mark = state->kwd_mark; - Py_INCREF(state->lru_list_elem_type); - obj->lru_list_elem_type = state->lru_list_elem_type; - Py_INCREF(cache_info_type); - obj->cache_info_type = cache_info_type; + obj->kwd_mark = Py_NewRef(state->kwd_mark); + obj->lru_list_elem_type = (PyTypeObject*)Py_NewRef(state->lru_list_elem_type); + obj->cache_info_type = Py_NewRef(cache_info_type); obj->dict = NULL; obj->weakreflist = NULL; return (PyObject *)obj; @@ -1262,8 +1243,7 @@ lru_cache_clear_list(lru_list_elem *link) { while (link != NULL) { lru_list_elem *next = link->next; - Py_DECREF(link); - link = next; + Py_SETREF(link, next); } } @@ -1306,8 +1286,7 @@ static PyObject * lru_cache_descr_get(PyObject *self, PyObject *obj, PyObject *type) { if (obj == Py_None || obj == NULL) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } return PyMethod_New(self, obj); } @@ -1360,15 +1339,13 @@ lru_cache_reduce(PyObject *self, PyObject *unused) static PyObject * lru_cache_copy(PyObject *self, PyObject *unused) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * lru_cache_deepcopy(PyObject *self, PyObject *unused) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } static int diff --git a/Modules/_gdbmmodule.c b/Modules/_gdbmmodule.c index a96d32306d5550..4e8acdefc722b2 100644 --- a/Modules/_gdbmmodule.c +++ b/Modules/_gdbmmodule.c @@ -256,8 +256,7 @@ _gdbm_gdbm_get_impl(gdbmobject *self, PyObject *key, PyObject *default_value) res = gdbm_subscript(self, key); if (res == NULL && PyErr_ExceptionMatches(PyExc_KeyError)) { PyErr_Clear(); - Py_INCREF(default_value); - return default_value; + return Py_NewRef(default_value); } return res; } @@ -566,8 +565,7 @@ _gdbm_gdbm_sync_impl(gdbmobject *self, PyTypeObject *cls) static PyObject * gdbm__enter__(PyObject *self, PyObject *args) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * @@ -677,7 +675,6 @@ dbmopen_impl(PyObject *module, PyObject *filename, const char *flags, return NULL; } for (flags++; *flags != '\0'; flags++) { - char buf[40]; switch (*flags) { #ifdef GDBM_FAST case 'f': @@ -695,9 +692,8 @@ dbmopen_impl(PyObject *module, PyObject *filename, const char *flags, break; #endif default: - PyOS_snprintf(buf, sizeof(buf), "Flag '%c' is not supported.", - *flags); - PyErr_SetString(state->gdbm_error, buf); + PyErr_Format(state->gdbm_error, + "Flag '%c' is not supported.", (unsigned char)*flags); return NULL; } } diff --git a/Modules/_heapqmodule.c b/Modules/_heapqmodule.c index 3dbaaa0a0da1d6..07ddc7b0851241 100644 --- a/Modules/_heapqmodule.c +++ b/Modules/_heapqmodule.c @@ -197,8 +197,7 @@ heapreplace_internal(PyObject *heap, PyObject *item, int siftup_func(PyListObjec } returnitem = PyList_GET_ITEM(heap, 0); - Py_INCREF(item); - PyList_SET_ITEM(heap, 0, item); 
+ PyList_SET_ITEM(heap, 0, Py_NewRef(item)); if (siftup_func((PyListObject *)heap, 0)) { Py_DECREF(returnitem); return NULL; @@ -253,8 +252,7 @@ _heapq_heappushpop_impl(PyObject *module, PyObject *heap, PyObject *item) int cmp; if (PyList_GET_SIZE(heap) == 0) { - Py_INCREF(item); - return item; + return Py_NewRef(item); } PyObject* top = PyList_GET_ITEM(heap, 0); @@ -264,8 +262,7 @@ _heapq_heappushpop_impl(PyObject *module, PyObject *heap, PyObject *item) if (cmp < 0) return NULL; if (cmp == 0) { - Py_INCREF(item); - return item; + return Py_NewRef(item); } if (PyList_GET_SIZE(heap) == 0) { @@ -274,8 +271,7 @@ _heapq_heappushpop_impl(PyObject *module, PyObject *heap, PyObject *item) } returnitem = PyList_GET_ITEM(heap, 0); - Py_INCREF(item); - PyList_SET_ITEM(heap, 0, item); + PyList_SET_ITEM(heap, 0, Py_NewRef(item)); if (siftup((PyListObject *)heap, 0)) { Py_DECREF(returnitem); return NULL; @@ -410,8 +406,7 @@ siftdown_max(PyListObject *heap, Py_ssize_t startpos, Py_ssize_t pos) newitem = arr[pos]; while (pos > startpos) { parentpos = (pos - 1) >> 1; - parent = arr[parentpos]; - Py_INCREF(parent); + parent = Py_NewRef(arr[parentpos]); Py_INCREF(newitem); cmp = PyObject_RichCompareBool(parent, newitem, Py_LT); Py_DECREF(parent); diff --git a/Modules/_io/_iomodule.c b/Modules/_io/_iomodule.c index 38ef24637b7318..af5950cf66c178 100644 --- a/Modules/_io/_iomodule.c +++ b/Modules/_io/_iomodule.c @@ -59,7 +59,7 @@ PyDoc_STRVAR(module_doc, " I/O classes. open() uses the file's blksize (as obtained by os.stat) if\n" " possible.\n" ); - + /* * The main open() function @@ -74,7 +74,7 @@ _io.open encoding: str(accept={str, NoneType}) = None errors: str(accept={str, NoneType}) = None newline: str(accept={str, NoneType}) = None - closefd: bool(accept={int}) = True + closefd: bool = True opener: object = None Open file and return a stream. Raise OSError upon failure. 
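Not part of the patch: the hunks above in Modules/_elementtree.c, Modules/_functoolsmodule.c, Modules/_gdbmmodule.c and Modules/_heapqmodule.c fold the two-statement "Py_INCREF(x); dst = x;" idiom into Py_NewRef()/Py_XNewRef(), and pair it with Py_SETREF()/Py_XSETREF() where an existing reference is being replaced. A minimal sketch of the equivalence, using an illustrative holder struct that does not appear in the patch:

    #include "Python.h"

    typedef struct {
        PyObject *field;   /* strong reference, may start out NULL */
    } holder;

    /* Old idiom: bump the refcount, swap the pointer, drop the old value. */
    static void
    holder_set_old(holder *h, PyObject *value)
    {
        Py_INCREF(value);
        PyObject *old = h->field;
        h->field = value;
        Py_XDECREF(old);
    }

    /* New idiom: Py_NewRef() returns its argument with the refcount already
       incremented (Py_XNewRef() is the NULL-tolerant variant), and
       Py_XSETREF() stores the new reference before releasing the old one,
       so the same exchange becomes a single expression. */
    static void
    holder_set_new(holder *h, PyObject *value)
    {
        Py_XSETREF(h->field, Py_NewRef(value));
    }
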
@@ -196,7 +196,7 @@ static PyObject * _io_open_impl(PyObject *module, PyObject *file, const char *mode, int buffering, const char *encoding, const char *errors, const char *newline, int closefd, PyObject *opener) -/*[clinic end generated code: output=aefafc4ce2b46dc0 input=5bb37f174cb2fb11]*/ +/*[clinic end generated code: output=aefafc4ce2b46dc0 input=cd034e7cdfbf4e78]*/ { unsigned i; @@ -204,16 +204,14 @@ _io_open_impl(PyObject *module, PyObject *file, const char *mode, int text = 0, binary = 0; char rawmode[6], *m; - int line_buffering, is_number; - long isatty = 0; + int line_buffering, is_number, isatty = 0; PyObject *raw, *modeobj = NULL, *buffer, *wrapper, *result = NULL, *path_or_fd = NULL; is_number = PyNumber_Check(file); if (is_number) { - path_or_fd = file; - Py_INCREF(path_or_fd); + path_or_fd = Py_NewRef(file); } else { path_or_fd = PyOS_FSPath(file); if (path_or_fd == NULL) { @@ -335,8 +333,7 @@ _io_open_impl(PyObject *module, PyObject *file, const char *mode, goto error; result = raw; - Py_DECREF(path_or_fd); - path_or_fd = NULL; + Py_SETREF(path_or_fd, NULL); modeobj = PyUnicode_FromString(mode); if (modeobj == NULL) @@ -347,9 +344,9 @@ _io_open_impl(PyObject *module, PyObject *file, const char *mode, PyObject *res = PyObject_CallMethodNoArgs(raw, &_Py_ID(isatty)); if (res == NULL) goto error; - isatty = PyLong_AsLong(res); + isatty = PyObject_IsTrue(res); Py_DECREF(res); - if (isatty == -1 && PyErr_Occurred()) + if (isatty < 0) goto error; } @@ -489,8 +486,7 @@ _io_text_encoding_impl(PyObject *module, PyObject *encoding, int stacklevel) encoding = &_Py_ID(locale); } } - Py_INCREF(encoding); - return encoding; + return Py_NewRef(encoding); } @@ -512,7 +508,7 @@ _io_open_code_impl(PyObject *module, PyObject *path) { return PyFile_OpenCodeObject(path); } - + /* * Private helpers for the io module. 
*/ @@ -697,16 +693,15 @@ PyInit__io(void) "UnsupportedOperation", PyExc_OSError, PyExc_ValueError); if (state->unsupported_operation == NULL) goto fail; - Py_INCREF(state->unsupported_operation); if (PyModule_AddObject(m, "UnsupportedOperation", - state->unsupported_operation) < 0) + Py_NewRef(state->unsupported_operation)) < 0) goto fail; /* BlockingIOError, for compatibility */ - Py_INCREF(PyExc_BlockingIOError); - if (PyModule_AddObject(m, "BlockingIOError", - (PyObject *) PyExc_BlockingIOError) < 0) + if (PyModule_AddObjectRef(m, "BlockingIOError", + (PyObject *) PyExc_BlockingIOError) < 0) { goto fail; + } // Set type base classes PyFileIO_Type.tp_base = &PyRawIOBase_Type; diff --git a/Modules/_io/bufferedio.c b/Modules/_io/bufferedio.c index 4a4a1992dbbb7a..ba8969f0bcd100 100644 --- a/Modules/_io/bufferedio.c +++ b/Modules/_io/bufferedio.c @@ -389,12 +389,11 @@ buffered_dealloc(buffered *self) static PyObject * buffered_sizeof(buffered *self, PyObject *Py_UNUSED(ignored)) { - Py_ssize_t res; - - res = _PyObject_SIZE(Py_TYPE(self)); - if (self->buffer) - res += self->buffer_size; - return PyLong_FromSsize_t(res); + size_t res = _PyObject_SIZE(Py_TYPE(self)); + if (self->buffer) { + res += (size_t)self->buffer_size; + } + return PyLong_FromSize_t(res); } static int @@ -481,8 +480,7 @@ buffered_close(buffered *self, PyObject *args) if (r < 0) goto end; if (r > 0) { - res = Py_None; - Py_INCREF(res); + res = Py_NewRef(Py_None); goto end; } @@ -748,26 +746,26 @@ _buffered_init(buffered *self) int _PyIO_trap_eintr(void) { - static PyObject *eintr_int = NULL; PyObject *typ, *val, *tb; PyOSErrorObject *env_err; - - if (eintr_int == NULL) { - eintr_int = PyLong_FromLong(EINTR); - assert(eintr_int != NULL); - } if (!PyErr_ExceptionMatches(PyExc_OSError)) return 0; PyErr_Fetch(&typ, &val, &tb); PyErr_NormalizeException(&typ, &val, &tb); env_err = (PyOSErrorObject *) val; assert(env_err != NULL); - if (env_err->myerrno != NULL && - PyObject_RichCompareBool(env_err->myerrno, eintr_int, Py_EQ) > 0) { - Py_DECREF(typ); - Py_DECREF(val); - Py_XDECREF(tb); - return 1; + if (env_err->myerrno != NULL) { + assert(EINTR > 0 && EINTR < INT_MAX); + assert(PyLong_CheckExact(env_err->myerrno)); + int overflow; + int myerrno = PyLong_AsLongAndOverflow(env_err->myerrno, &overflow); + PyErr_Clear(); + if (myerrno == EINTR) { + Py_DECREF(typ); + Py_DECREF(val); + Py_XDECREF(tb); + return 1; + } } /* This silences any error set by PyObject_RichCompareBool() */ PyErr_Restore(typ, val, tb); @@ -1007,8 +1005,7 @@ _buffered_readinto_generic(buffered *self, Py_buffer *buffer, char readinto1) break; if (n < 0) { if (n == -2) { - Py_INCREF(Py_None); - res = Py_None; + res = Py_NewRef(Py_None); } goto end; } @@ -1422,8 +1419,7 @@ _io_BufferedReader___init___impl(buffered *self, PyObject *raw, if (_PyIOBase_check_readable(raw, Py_True) == NULL) return -1; - Py_INCREF(raw); - Py_XSETREF(self->raw, raw); + Py_XSETREF(self->raw, Py_NewRef(raw)); self->buffer_size = buffer_size; self->readable = 1; self->writable = 0; diff --git a/Modules/_io/bytesio.c b/Modules/_io/bytesio.c index 930ef7e29dbcf6..6698c60355fcc5 100644 --- a/Modules/_io/bytesio.c +++ b/Modules/_io/bytesio.c @@ -324,8 +324,7 @@ _io_BytesIO_getbuffer_impl(bytesio *self) buf = (bytesiobuf *) type->tp_alloc(type, 0); if (buf == NULL) return NULL; - Py_INCREF(self); - buf->source = self; + buf->source = (bytesio*)Py_NewRef(self); view = PyMemoryView_FromObject((PyObject *) buf); Py_DECREF(buf); return view; @@ -356,8 +355,7 @@ _io_BytesIO_getvalue_impl(bytesio 
*self) return NULL; } } - Py_INCREF(self->buf); - return self->buf; + return Py_NewRef(self->buf); } /*[clinic input] @@ -401,8 +399,7 @@ read_bytes(bytesio *self, Py_ssize_t size) self->pos == 0 && size == PyBytes_GET_SIZE(self->buf) && self->exports == 0) { self->pos += size; - Py_INCREF(self->buf); - return self->buf; + return Py_NewRef(self->buf); } output = PyBytes_AS_STRING(self->buf) + self->pos; @@ -791,8 +788,7 @@ bytesio_getstate(bytesio *self, PyObject *Py_UNUSED(ignored)) if (initvalue == NULL) return NULL; if (self->dict == NULL) { - Py_INCREF(Py_None); - dict = Py_None; + dict = Py_NewRef(Py_None); } else { dict = PyDict_Copy(self->dict); @@ -875,8 +871,7 @@ bytesio_setstate(bytesio *self, PyObject *state) return NULL; } else { - Py_INCREF(dict); - self->dict = dict; + self->dict = Py_NewRef(dict); } } @@ -943,8 +938,7 @@ _io_BytesIO___init___impl(bytesio *self, PyObject *initvalue) } if (initvalue && initvalue != Py_None) { if (PyBytes_CheckExact(initvalue)) { - Py_INCREF(initvalue); - Py_XSETREF(self->buf, initvalue); + Py_XSETREF(self->buf, Py_NewRef(initvalue)); self->string_size = PyBytes_GET_SIZE(initvalue); } else { @@ -963,17 +957,15 @@ _io_BytesIO___init___impl(bytesio *self, PyObject *initvalue) static PyObject * bytesio_sizeof(bytesio *self, void *unused) { - Py_ssize_t res; - - res = _PyObject_SIZE(Py_TYPE(self)); + size_t res = _PyObject_SIZE(Py_TYPE(self)); if (self->buf && !SHARED_BUF(self)) { - Py_ssize_t s = _PySys_GetSizeOf(self->buf); - if (s == -1) { + size_t s = _PySys_GetSizeOf(self->buf); + if (s == (size_t)-1) { return NULL; } res += s; } - return PyLong_FromSsize_t(res); + return PyLong_FromSize_t(res); } static int diff --git a/Modules/_io/clinic/_iomodule.c.h b/Modules/_io/clinic/_iomodule.c.h index b38738486f6856..4d76e333b0f293 100644 --- a/Modules/_io/clinic/_iomodule.c.h +++ b/Modules/_io/clinic/_iomodule.c.h @@ -280,8 +280,8 @@ _io_open(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw } } if (args[6]) { - closefd = _PyLong_AsInt(args[6]); - if (closefd == -1 && PyErr_Occurred()) { + closefd = PyObject_IsTrue(args[6]); + if (closefd < 0) { goto exit; } if (!--noptargs) { @@ -407,4 +407,4 @@ _io_open_code(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObjec exit: return return_value; } -/*[clinic end generated code: output=1f8001287a423470 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=f387eba3f4c0254a input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/fileio.c.h b/Modules/_io/clinic/fileio.c.h index a925b94fe07531..b6e9bd5b65a029 100644 --- a/Modules/_io/clinic/fileio.c.h +++ b/Modules/_io/clinic/fileio.c.h @@ -116,8 +116,8 @@ _io_FileIO___init__(PyObject *self, PyObject *args, PyObject *kwargs) } } if (fastargs[2]) { - closefd = _PyLong_AsInt(fastargs[2]); - if (closefd == -1 && PyErr_Occurred()) { + closefd = PyObject_IsTrue(fastargs[2]); + if (closefd < 0) { goto exit; } if (!--noptargs) { @@ -466,4 +466,4 @@ _io_FileIO_isatty(fileio *self, PyObject *Py_UNUSED(ignored)) #ifndef _IO_FILEIO_TRUNCATE_METHODDEF #define _IO_FILEIO_TRUNCATE_METHODDEF #endif /* !defined(_IO_FILEIO_TRUNCATE_METHODDEF) */ -/*[clinic end generated code: output=ff479a26cab0d479 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=27f883807a6c29ae input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/textio.c.h b/Modules/_io/clinic/textio.c.h index 038f0a5c209d49..db968e884cc805 100644 --- a/Modules/_io/clinic/textio.c.h +++ b/Modules/_io/clinic/textio.c.h @@ -68,8 +68,8 @@ 
_io_IncrementalNewlineDecoder___init__(PyObject *self, PyObject *args, PyObject goto exit; } decoder = fastargs[0]; - translate = _PyLong_AsInt(fastargs[1]); - if (translate == -1 && PyErr_Occurred()) { + translate = PyObject_IsTrue(fastargs[1]); + if (translate < 0) { goto exit; } if (!noptargs) { @@ -137,8 +137,8 @@ _io_IncrementalNewlineDecoder_decode(nldecoder_object *self, PyObject *const *ar if (!noptargs) { goto skip_optional_pos; } - final = _PyLong_AsInt(args[1]); - if (final == -1 && PyErr_Occurred()) { + final = PyObject_IsTrue(args[1]); + if (final < 0) { goto exit; } skip_optional_pos: @@ -331,16 +331,16 @@ _io_TextIOWrapper___init__(PyObject *self, PyObject *args, PyObject *kwargs) } } if (fastargs[4]) { - line_buffering = _PyLong_AsInt(fastargs[4]); - if (line_buffering == -1 && PyErr_Occurred()) { + line_buffering = PyObject_IsTrue(fastargs[4]); + if (line_buffering < 0) { goto exit; } if (!--noptargs) { goto skip_optional_pos; } } - write_through = _PyLong_AsInt(fastargs[5]); - if (write_through == -1 && PyErr_Occurred()) { + write_through = PyObject_IsTrue(fastargs[5]); + if (write_through < 0) { goto exit; } skip_optional_pos: @@ -769,4 +769,4 @@ _io_TextIOWrapper_close(textio *self, PyObject *Py_UNUSED(ignored)) { return _io_TextIOWrapper_close_impl(self); } -/*[clinic end generated code: output=aecd376eca3cb148 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=73f84b13c343b34b input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/winconsoleio.c.h b/Modules/_io/clinic/winconsoleio.c.h index 65820a8f2ea0b3..df834dbde40f5b 100644 --- a/Modules/_io/clinic/winconsoleio.c.h +++ b/Modules/_io/clinic/winconsoleio.c.h @@ -115,8 +115,8 @@ _io__WindowsConsoleIO___init__(PyObject *self, PyObject *args, PyObject *kwargs) } } if (fastargs[2]) { - closefd = _PyLong_AsInt(fastargs[2]); - if (closefd == -1 && PyErr_Occurred()) { + closefd = PyObject_IsTrue(fastargs[2]); + if (closefd < 0) { goto exit; } if (!--noptargs) { @@ -407,4 +407,4 @@ _io__WindowsConsoleIO_isatty(winconsoleio *self, PyObject *Py_UNUSED(ignored)) #ifndef _IO__WINDOWSCONSOLEIO_ISATTY_METHODDEF #define _IO__WINDOWSCONSOLEIO_ISATTY_METHODDEF #endif /* !defined(_IO__WINDOWSCONSOLEIO_ISATTY_METHODDEF) */ -/*[clinic end generated code: output=08ae244e9a44da55 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=4920e9068e0cf08a input=a9049054013a1b77]*/ diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c index 00859978e8cd6c..d1a183cedac53a 100644 --- a/Modules/_io/fileio.c +++ b/Modules/_io/fileio.c @@ -198,7 +198,7 @@ extern int _Py_open_cloexec_works; _io.FileIO.__init__ file as nameobj: object mode: str = "r" - closefd: bool(accept={int}) = True + closefd: bool = True opener: object = None Open a file. @@ -219,7 +219,7 @@ results in functionality similar to passing None). 
static int _io_FileIO___init___impl(fileio *self, PyObject *nameobj, const char *mode, int closefd, PyObject *opener) -/*[clinic end generated code: output=23413f68e6484bbd input=1596c9157a042a39]*/ +/*[clinic end generated code: output=23413f68e6484bbd input=588aac967e0ba74b]*/ { #ifdef MS_WINDOWS Py_UNICODE *widename = NULL; @@ -485,8 +485,12 @@ _io_FileIO___init___impl(fileio *self, PyObject *nameobj, const char *mode, ret = -1; if (!fd_is_own) self->fd = -1; - if (self->fd >= 0) + if (self->fd >= 0) { + PyObject *exc, *val, *tb; + PyErr_Fetch(&exc, &val, &tb); internal_close(self); + _PyErr_ChainExceptions(exc, val, tb); + } done: #ifdef MS_WINDOWS diff --git a/Modules/_io/iobase.c b/Modules/_io/iobase.c index 6ae43a8b3bd626..7b9391ec54d732 100644 --- a/Modules/_io/iobase.c +++ b/Modules/_io/iobase.c @@ -464,8 +464,7 @@ iobase_enter(PyObject *self, PyObject *args) if (iobase_check_closed(self)) return NULL; - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * @@ -642,8 +641,7 @@ iobase_iter(PyObject *self) if (iobase_check_closed(self)) return NULL; - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * diff --git a/Modules/_io/stringio.c b/Modules/_io/stringio.c index 0f31c172499ac3..ae6c3125a2d9da 100644 --- a/Modules/_io/stringio.c +++ b/Modules/_io/stringio.c @@ -188,14 +188,12 @@ write_str(stringio *self, PyObject *obj) self->decoder, obj, 1 /* always final */); } else { - decoded = obj; - Py_INCREF(decoded); + decoded = Py_NewRef(obj); } if (self->writenl) { PyObject *translated = PyUnicode_Replace( decoded, &_Py_STR(newline), self->writenl, -1); - Py_DECREF(decoded); - decoded = translated; + Py_SETREF(decoded, translated); } if (decoded == NULL) return -1; @@ -710,8 +708,7 @@ _io_StringIO___init___impl(stringio *self, PyObject *value, is pointless for StringIO) */ if (newline != NULL && newline[0] == '\r') { - self->writenl = self->readnl; - Py_INCREF(self->writenl); + self->writenl = Py_NewRef(self->readnl); } if (self->readuniversal) { @@ -823,8 +820,7 @@ stringio_getstate(stringio *self, PyObject *Py_UNUSED(ignored)) if (initvalue == NULL) return NULL; if (self->dict == NULL) { - Py_INCREF(Py_None); - dict = Py_None; + dict = Py_NewRef(Py_None); } else { dict = PyDict_Copy(self->dict); @@ -934,8 +930,7 @@ stringio_setstate(stringio *self, PyObject *state) return NULL; } else { - Py_INCREF(dict); - self->dict = dict; + self->dict = Py_NewRef(dict); } } diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c index 3369694653fd96..32ab8a44c62151 100644 --- a/Modules/_io/textio.c +++ b/Modules/_io/textio.c @@ -212,7 +212,7 @@ typedef struct { /*[clinic input] _io.IncrementalNewlineDecoder.__init__ decoder: object - translate: int + translate: bool errors: object(c_default="NULL") = "strict" Codec used when reading a file in universal newlines mode. @@ -229,19 +229,18 @@ static int _io_IncrementalNewlineDecoder___init___impl(nldecoder_object *self, PyObject *decoder, int translate, PyObject *errors) -/*[clinic end generated code: output=fbd04d443e764ec2 input=89db6b19c6b126bf]*/ +/*[clinic end generated code: output=fbd04d443e764ec2 input=ed547aa257616b0e]*/ { - self->decoder = decoder; - Py_INCREF(decoder); if (errors == NULL) { - self->errors = &_Py_ID(strict); + errors = Py_NewRef(&_Py_ID(strict)); } else { - self->errors = errors; + errors = Py_NewRef(errors); } - Py_INCREF(self->errors); + Py_XSETREF(self->errors, errors); + Py_XSETREF(self->decoder, Py_NewRef(decoder)); self->translate = translate ? 
1 : 0; self->seennl = 0; self->pendingcr = 0; @@ -276,6 +275,13 @@ check_decoded(PyObject *decoded) return 0; } +#define CHECK_INITIALIZED_DECODER(self) \ + if (self->errors == NULL) { \ + PyErr_SetString(PyExc_ValueError, \ + "IncrementalNewlineDecoder.__init__() not called"); \ + return NULL; \ + } + #define SEEN_CR 1 #define SEEN_LF 2 #define SEEN_CRLF 4 @@ -289,11 +295,7 @@ _PyIncrementalNewlineDecoder_decode(PyObject *myself, Py_ssize_t output_len; nldecoder_object *self = (nldecoder_object *) myself; - if (self->decoder == NULL) { - PyErr_SetString(PyExc_ValueError, - "IncrementalNewlineDecoder.__init__ not called"); - return NULL; - } + CHECK_INITIALIZED_DECODER(self); /* decode input (with the eventual \r from a previous pass) */ if (self->decoder != Py_None) { @@ -301,8 +303,7 @@ _PyIncrementalNewlineDecoder_decode(PyObject *myself, &_Py_ID(decode), input, final ? Py_True : Py_False, NULL); } else { - output = input; - Py_INCREF(output); + output = Py_NewRef(input); } if (check_decoded(output) < 0) @@ -323,8 +324,7 @@ _PyIncrementalNewlineDecoder_decode(PyObject *myself, out = PyUnicode_DATA(modified); PyUnicode_WRITE(kind, out, 0, '\r'); memcpy(out + kind, PyUnicode_DATA(output), kind * output_len); - Py_DECREF(output); - output = modified; /* output remains ready */ + Py_SETREF(output, modified); /* output remains ready */ self->pendingcr = 0; output_len++; } @@ -339,8 +339,7 @@ _PyIncrementalNewlineDecoder_decode(PyObject *myself, PyObject *modified = PyUnicode_Substring(output, 0, output_len -1); if (modified == NULL) goto error; - Py_DECREF(output); - output = modified; + Py_SETREF(output, modified); self->pendingcr = 1; } } @@ -485,13 +484,13 @@ _PyIncrementalNewlineDecoder_decode(PyObject *myself, /*[clinic input] _io.IncrementalNewlineDecoder.decode input: object - final: bool(accept={int}) = False + final: bool = False [clinic start generated code]*/ static PyObject * _io_IncrementalNewlineDecoder_decode_impl(nldecoder_object *self, PyObject *input, int final) -/*[clinic end generated code: output=0d486755bb37a66e input=a4ea97f26372d866]*/ +/*[clinic end generated code: output=0d486755bb37a66e input=90e223c70322c5cd]*/ { return _PyIncrementalNewlineDecoder_decode((PyObject *) self, input, final); } @@ -507,6 +506,8 @@ _io_IncrementalNewlineDecoder_getstate_impl(nldecoder_object *self) PyObject *buffer; unsigned long long flag; + CHECK_INITIALIZED_DECODER(self); + if (self->decoder != Py_None) { PyObject *state = PyObject_CallMethodNoArgs(self->decoder, &_Py_ID(getstate)); @@ -551,6 +552,8 @@ _io_IncrementalNewlineDecoder_setstate(nldecoder_object *self, PyObject *buffer; unsigned long long flag; + CHECK_INITIALIZED_DECODER(self); + if (!PyTuple_Check(state)) { PyErr_SetString(PyExc_TypeError, "state argument must be a tuple"); return NULL; @@ -581,6 +584,8 @@ static PyObject * _io_IncrementalNewlineDecoder_reset_impl(nldecoder_object *self) /*[clinic end generated code: output=32fa40c7462aa8ff input=728678ddaea776df]*/ { + CHECK_INITIALIZED_DECODER(self); + self->seennl = 0; self->pendingcr = 0; if (self->decoder != Py_None) @@ -592,6 +597,8 @@ _io_IncrementalNewlineDecoder_reset_impl(nldecoder_object *self) static PyObject * incrementalnewlinedecoder_newlines_get(nldecoder_object *self, void *context) { + CHECK_INITIALIZED_DECODER(self); + switch (self->seennl) { case SEEN_CR: return PyUnicode_FromString("\r"); @@ -868,8 +875,7 @@ _textiowrapper_set_decoder(textio *self, PyObject *codec_info, self->decoder, self->readtranslate ? 
Py_True : Py_False, NULL); if (incrementalDecoder == NULL) return -1; - Py_CLEAR(self->decoder); - self->decoder = incrementalDecoder; + Py_XSETREF(self->decoder, incrementalDecoder); } return 0; @@ -1017,8 +1023,8 @@ _io.TextIOWrapper.__init__ encoding: str(accept={str, NoneType}) = None errors: object = None newline: str(accept={str, NoneType}) = None - line_buffering: bool(accept={int}) = False - write_through: bool(accept={int}) = False + line_buffering: bool = False + write_through: bool = False Character and line based layer over a BufferedIOBase object, buffer. @@ -1055,7 +1061,7 @@ _io_TextIOWrapper___init___impl(textio *self, PyObject *buffer, const char *encoding, PyObject *errors, const char *newline, int line_buffering, int write_through) -/*[clinic end generated code: output=72267c0c01032ed2 input=72590963698f289b]*/ +/*[clinic end generated code: output=72267c0c01032ed2 input=e6cfaaaf6059d4f5]*/ { PyObject *raw, *codec_info = NULL; PyObject *res; @@ -1148,8 +1154,7 @@ _io_TextIOWrapper___init___impl(textio *self, PyObject *buffer, * of the partially constructed object (like self->encoding) */ - Py_INCREF(errors); - self->errors = errors; + self->errors = Py_NewRef(errors); self->chunk_size = 8192; self->line_buffering = line_buffering; self->write_through = write_through; @@ -1157,8 +1162,7 @@ _io_TextIOWrapper___init___impl(textio *self, PyObject *buffer, goto error; } - self->buffer = buffer; - Py_INCREF(buffer); + self->buffer = Py_NewRef(buffer); /* Build the decoder object */ if (_textiowrapper_set_decoder(self, codec_info, PyUnicode_AsUTF8(errors)) != 0) @@ -1284,9 +1288,8 @@ textiowrapper_change_encoding(textio *self, PyObject *encoding, } Py_DECREF(codec_info); - Py_INCREF(errors); Py_SETREF(self->encoding, encoding); - Py_SETREF(self->errors, errors); + Py_SETREF(self->errors, Py_NewRef(errors)); return _textiowrapper_fix_encoder_state(self); } @@ -1502,8 +1505,7 @@ _textiowrapper_writeflush(textio *self) PyObject *b; if (PyBytes_Check(pending)) { - b = pending; - Py_INCREF(b); + b = Py_NewRef(pending); } else if (PyUnicode_Check(pending)) { assert(PyUnicode_IS_ASCII(pending)); @@ -1618,8 +1620,7 @@ _io_TextIOWrapper_write_impl(textio *self, PyObject *text) // See bpo-43260 PyUnicode_GET_LENGTH(text) <= self->chunk_size && is_asciicompat_encoding(self->encodefunc)) { - b = text; - Py_INCREF(b); + b = Py_NewRef(text); } else { b = (*self->encodefunc)((PyObject *) self, text); @@ -1741,8 +1742,7 @@ textiowrapper_get_decoded_chars(textio *self, Py_ssize_t n) return NULL; } else { - chars = self->decoded_chars; - Py_INCREF(chars); + chars = Py_NewRef(self->decoded_chars); } self->decoded_chars_used += n; @@ -2139,10 +2139,9 @@ _textiowrapper_readline(textio *self, Py_ssize_t limit) } if (remaining == NULL) { - line = self->decoded_chars; + line = Py_NewRef(self->decoded_chars); start = self->decoded_chars_used; offset_to_buffer = 0; - Py_INCREF(line); } else { assert(self->decoded_chars_used == 0); @@ -3115,8 +3114,7 @@ static PyObject * textiowrapper_errors_get(textio *self, void *context) { CHECK_INITIALIZED(self); - Py_INCREF(self->errors); - return self->errors; + return Py_NewRef(self->errors); } static PyObject * diff --git a/Modules/_io/winconsoleio.c b/Modules/_io/winconsoleio.c index 5c1a6dd86fc54f..d5de64b4ac3dfd 100644 --- a/Modules/_io/winconsoleio.c +++ b/Modules/_io/winconsoleio.c @@ -235,7 +235,7 @@ winconsoleio_new(PyTypeObject *type, PyObject *args, PyObject *kwds) _io._WindowsConsoleIO.__init__ file as nameobj: object mode: str = "r" - closefd: 
bool(accept={int}) = True + closefd: bool = True opener: object = None Open a console buffer by file descriptor. @@ -249,7 +249,7 @@ static int _io__WindowsConsoleIO___init___impl(winconsoleio *self, PyObject *nameobj, const char *mode, int closefd, PyObject *opener) -/*[clinic end generated code: output=3fd9cbcdd8d95429 input=06ae4b863c63244b]*/ +/*[clinic end generated code: output=3fd9cbcdd8d95429 input=7a3eed6bbe998fd9]*/ { const char *s; wchar_t *name = NULL; diff --git a/Modules/_json.c b/Modules/_json.c index 1c39b46937d792..429b4ee0fa8d8d 100644 --- a/Modules/_json.c +++ b/Modules/_json.c @@ -7,12 +7,13 @@ #ifndef Py_BUILD_CORE_BUILTIN # define Py_BUILD_CORE_MODULE 1 #endif -#define NEEDS_PY_IDENTIFIER #include "Python.h" -#include "pycore_ceval.h" // _Py_EnterRecursiveCall() -#include "structmember.h" // PyMemberDef -#include <stdbool.h> // bool +#include "pycore_ceval.h" // _Py_EnterRecursiveCall() +#include "pycore_runtime.h" // _PyRuntime +#include "structmember.h" // PyMemberDef +#include "pycore_global_objects.h" // _Py_ID() +#include <stdbool.h> // bool typedef struct _PyScannerObject { @@ -305,15 +306,9 @@ static void raise_errmsg(const char *msg, PyObject *s, Py_ssize_t end) { /* Use JSONDecodeError exception to raise a nice looking ValueError subclass */ - _Py_static_string(PyId_decoder, "json.decoder"); - PyObject *decoder = _PyImport_GetModuleId(&PyId_decoder); - if (decoder == NULL) { - return; - } - - _Py_IDENTIFIER(JSONDecodeError); - PyObject *JSONDecodeError = _PyObject_GetAttrId(decoder, &PyId_JSONDecodeError); - Py_DECREF(decoder); + _Py_DECLARE_STR(json_decoder, "json.decoder"); + PyObject *JSONDecodeError = + _PyImport_GetModuleAttr(&_Py_STR(json_decoder), &_Py_ID(JSONDecodeError)); if (JSONDecodeError == NULL) { return; } @@ -561,7 +556,7 @@ py_scanstring(PyObject* Py_UNUSED(self), PyObject *args) Py_ssize_t end; Py_ssize_t next_end = -1; int strict = 1; - if (!PyArg_ParseTuple(args, "On|i:scanstring", &pystr, &end, &strict)) { + if (!PyArg_ParseTuple(args, "On|p:scanstring", &pystr, &end, &strict)) { return NULL; } if (PyUnicode_Check(pystr)) { @@ -714,9 +709,7 @@ _parse_object_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ss if (memokey == NULL) { goto bail; } - Py_INCREF(memokey); - Py_DECREF(key); - key = memokey; + Py_SETREF(key, Py_NewRef(memokey)); idx = next_idx; /* skip whitespace between key and : delimiter, read :, skip whitespace */ @@ -1248,16 +1241,17 @@ encoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds) if (s == NULL) return NULL; - s->markers = markers; - s->defaultfn = defaultfn; - s->encoder = encoder; - s->indent = indent; - s->key_separator = key_separator; - s->item_separator = item_separator; + s->markers = Py_NewRef(markers); + s->defaultfn = Py_NewRef(defaultfn); + s->encoder = Py_NewRef(encoder); + s->indent = Py_NewRef(indent); + s->key_separator = Py_NewRef(key_separator); + s->item_separator = Py_NewRef(item_separator); s->sort_keys = sort_keys; s->skipkeys = skipkeys; s->allow_nan = allow_nan; s->fast_encode = NULL; + if (PyCFunction_Check(s->encoder)) { PyCFunction f = PyCFunction_GetFunction(s->encoder); if (f == (PyCFunction)py_encode_basestring_ascii || @@ -1266,12 +1260,6 @@ encoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds) } } - Py_INCREF(s->markers); - Py_INCREF(s->defaultfn); - Py_INCREF(s->encoder); - Py_INCREF(s->indent); - Py_INCREF(s->key_separator); - Py_INCREF(s->item_separator); return (PyObject *)s; } @@ -1310,28 +1298,13 @@ _encoded_const(PyObject *obj) { /* Return the 
JSON string representation of None, True, False */ if (obj == Py_None) { - _Py_static_string(PyId_null, "null"); - PyObject *s_null = _PyUnicode_FromId(&PyId_null); - if (s_null == NULL) { - return NULL; - } - return Py_NewRef(s_null); + return Py_NewRef(&_Py_ID(null)); } else if (obj == Py_True) { - _Py_static_string(PyId_true, "true"); - PyObject *s_true = _PyUnicode_FromId(&PyId_true); - if (s_true == NULL) { - return NULL; - } - return Py_NewRef(s_true); + return Py_NewRef(&_Py_ID(true)); } else if (obj == Py_False) { - _Py_static_string(PyId_false, "false"); - PyObject *s_false = _PyUnicode_FromId(&PyId_false); - if (s_false == NULL) { - return NULL; - } - return Py_NewRef(s_false); + return Py_NewRef(&_Py_ID(false)); } else { PyErr_SetString(PyExc_ValueError, "not a const"); @@ -1500,8 +1473,7 @@ encoder_encode_key_value(PyEncoderObject *s, _PyUnicodeWriter *writer, bool *fir PyObject *encoded; if (PyUnicode_Check(key)) { - Py_INCREF(key); - keystr = key; + keystr = Py_NewRef(key); } else if (PyFloat_Check(key)) { keystr = encoder_encode_float(s, key); @@ -1530,7 +1502,7 @@ encoder_encode_key_value(PyEncoderObject *s, _PyUnicodeWriter *writer, bool *fir if (*first) { *first = false; - } + } else { if (_PyUnicodeWriter_WriteStr(writer, s->item_separator) < 0) { Py_DECREF(keystr); diff --git a/Modules/_lsprof.c b/Modules/_lsprof.c index a0e262bdac26c6..37170bbea56ad3 100644 --- a/Modules/_lsprof.c +++ b/Modules/_lsprof.c @@ -128,8 +128,7 @@ normalizeUserObj(PyObject *obj) { PyCFunctionObject *fn; if (!PyCFunction_Check(obj)) { - Py_INCREF(obj); - return obj; + return Py_NewRef(obj); } /* Replace built-in function objects with a descriptive string because of built-in methods -- keeping a reference to @@ -142,8 +141,7 @@ normalizeUserObj(PyObject *obj) PyObject *modname = NULL; if (mod != NULL) { if (PyUnicode_Check(mod)) { - modname = mod; - Py_INCREF(modname); + modname = Py_NewRef(mod); } else if (PyModule_Check(mod)) { modname = PyModule_GetNameObject(mod); @@ -555,8 +553,7 @@ static int statsForEntry(rotating_node_t *node, void *arg) } } else { - Py_INCREF(Py_None); - collect->sublist = Py_None; + collect->sublist = Py_NewRef(Py_None); } info = PyObject_CallFunction((PyObject*) collect->state->stats_entry_type, @@ -670,7 +667,7 @@ profiler_enable(ProfilerObject *self, PyObject *args, PyObject *kwds) int subcalls = -1; int builtins = -1; static char *kwlist[] = {"subcalls", "builtins", 0}; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "|ii:enable", + if (!PyArg_ParseTupleAndKeywords(args, kwds, "|pp:enable", kwlist, &subcalls, &builtins)) return NULL; if (setSubcalls(self, subcalls) < 0 || setBuiltins(self, builtins) < 0) { @@ -773,7 +770,7 @@ profiler_init(ProfilerObject *pObj, PyObject *args, PyObject *kw) static char *kwlist[] = {"timer", "timeunit", "subcalls", "builtins", 0}; - if (!PyArg_ParseTupleAndKeywords(args, kw, "|Odii:Profiler", kwlist, + if (!PyArg_ParseTupleAndKeywords(args, kw, "|Odpp:Profiler", kwlist, &timer, &timeunit, &subcalls, &builtins)) return -1; @@ -781,8 +778,7 @@ profiler_init(ProfilerObject *pObj, PyObject *args, PyObject *kw) if (setSubcalls(pObj, subcalls) < 0 || setBuiltins(pObj, builtins) < 0) return -1; pObj->externalTimerUnit = timeunit; - Py_XINCREF(timer); - Py_XSETREF(pObj->externalTimer, timer); + Py_XSETREF(pObj->externalTimer, Py_XNewRef(timer)); return 0; } diff --git a/Modules/_multiprocessing/clinic/semaphore.c.h b/Modules/_multiprocessing/clinic/semaphore.c.h index dce0366c266f10..35347169bc1591 100644 --- 
a/Modules/_multiprocessing/clinic/semaphore.c.h +++ b/Modules/_multiprocessing/clinic/semaphore.c.h @@ -65,8 +65,8 @@ _multiprocessing_SemLock_acquire(SemLockObject *self, PyObject *const *args, Py_ goto skip_optional_pos; } if (args[0]) { - blocking = _PyLong_AsInt(args[0]); - if (blocking == -1 && PyErr_Occurred()) { + blocking = PyObject_IsTrue(args[0]); + if (blocking < 0) { goto exit; } if (!--noptargs) { @@ -162,8 +162,8 @@ _multiprocessing_SemLock_acquire(SemLockObject *self, PyObject *const *args, Py_ goto skip_optional_pos; } if (args[0]) { - blocking = _PyLong_AsInt(args[0]); - if (blocking == -1 && PyErr_Occurred()) { + blocking = PyObject_IsTrue(args[0]); + if (blocking < 0) { goto exit; } if (!--noptargs) { @@ -275,8 +275,8 @@ _multiprocessing_SemLock(PyTypeObject *type, PyObject *args, PyObject *kwargs) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - unlink = _PyLong_AsInt(fastargs[4]); - if (unlink == -1 && PyErr_Occurred()) { + unlink = PyObject_IsTrue(fastargs[4]); + if (unlink < 0) { goto exit; } return_value = _multiprocessing_SemLock_impl(type, kind, value, maxvalue, name, unlink); @@ -542,4 +542,4 @@ _multiprocessing_SemLock___exit__(SemLockObject *self, PyObject *const *args, Py #ifndef _MULTIPROCESSING_SEMLOCK___EXIT___METHODDEF #define _MULTIPROCESSING_SEMLOCK___EXIT___METHODDEF #endif /* !defined(_MULTIPROCESSING_SEMLOCK___EXIT___METHODDEF) */ -/*[clinic end generated code: output=720d7d0066dc0954 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=dae57a702cc01512 input=a9049054013a1b77]*/ diff --git a/Modules/_multiprocessing/semaphore.c b/Modules/_multiprocessing/semaphore.c index 58fb0eb96aeeed..897b8db7110a41 100644 --- a/Modules/_multiprocessing/semaphore.c +++ b/Modules/_multiprocessing/semaphore.c @@ -79,7 +79,7 @@ _GetSemaphoreValue(HANDLE handle, long *value) /*[clinic input] _multiprocessing.SemLock.acquire - block as blocking: bool(accept={int}) = True + block as blocking: bool = True timeout as timeout_obj: object = None Acquire the semaphore/lock. @@ -88,7 +88,7 @@ Acquire the semaphore/lock. static PyObject * _multiprocessing_SemLock_acquire_impl(SemLockObject *self, int blocking, PyObject *timeout_obj) -/*[clinic end generated code: output=f9998f0b6b0b0872 input=86f05662cf753eb4]*/ +/*[clinic end generated code: output=f9998f0b6b0b0872 input=e5b45f5cbb775166]*/ { double timeout; DWORD res, full_msecs, nhandles; @@ -295,7 +295,7 @@ sem_timedwait_save(sem_t *sem, struct timespec *deadline, PyThreadState *_save) /*[clinic input] _multiprocessing.SemLock.acquire - block as blocking: bool(accept={int}) = True + block as blocking: bool = True timeout as timeout_obj: object = None Acquire the semaphore/lock. @@ -304,7 +304,7 @@ Acquire the semaphore/lock. 
static PyObject * _multiprocessing_SemLock_acquire_impl(SemLockObject *self, int blocking, PyObject *timeout_obj) -/*[clinic end generated code: output=f9998f0b6b0b0872 input=86f05662cf753eb4]*/ +/*[clinic end generated code: output=f9998f0b6b0b0872 input=e5b45f5cbb775166]*/ { int res, err = 0; struct timespec deadline = {0}; @@ -474,14 +474,14 @@ _multiprocessing.SemLock.__new__ value: int maxvalue: int name: str - unlink: bool(accept={int}) + unlink: bool [clinic start generated code]*/ static PyObject * _multiprocessing_SemLock_impl(PyTypeObject *type, int kind, int value, int maxvalue, const char *name, int unlink) -/*[clinic end generated code: output=30727e38f5f7577a input=b378c3ee27d3a0fa]*/ +/*[clinic end generated code: output=30727e38f5f7577a input=fdaeb69814471c5b]*/ { SEM_HANDLE handle = SEM_FAILED; PyObject *result; diff --git a/Modules/_operator.c b/Modules/_operator.c index 3005845ec93249..4f2367150eefc4 100644 --- a/Modules/_operator.c +++ b/Modules/_operator.c @@ -722,8 +722,7 @@ _operator_is_not_impl(PyObject *module, PyObject *a, PyObject *b) { PyObject *result; result = (a != b) ? Py_True : Py_False; - Py_INCREF(result); - return result; + return Py_NewRef(result); } /* compare_digest **********************************************************/ @@ -1010,8 +1009,7 @@ itemgetter_new(PyTypeObject *type, PyObject *args, PyObject *kwds) return NULL; } - Py_INCREF(item); - ig->item = item; + ig->item = Py_NewRef(item); ig->nitems = nitems; ig->index = -1; if (PyLong_CheckExact(item)) { @@ -1095,8 +1093,7 @@ itemgetter_call_impl(itemgetterobject *ig, PyObject *obj) && ig->index < PyTuple_GET_SIZE(obj)) { result = PyTuple_GET_ITEM(obj, ig->index); - Py_INCREF(result); - return result; + return Py_NewRef(result); } return PyObject_GetItem(obj, ig->item); } @@ -1440,8 +1437,7 @@ dotjoinattr(PyObject *attr, PyObject **attrsep) } return PyUnicode_Join(*attrsep, attr); } else { - Py_INCREF(attr); - return attr; + return Py_NewRef(attr); } } @@ -1594,8 +1590,7 @@ methodcaller_new(PyTypeObject *type, PyObject *args, PyObject *kwds) PyUnicode_InternInPlace(&name); mc->name = name; - Py_XINCREF(kwds); - mc->kwds = kwds; + mc->kwds = Py_XNewRef(kwds); mc->args = PyTuple_GetSlice(args, 1, PyTuple_GET_SIZE(args)); if (mc->args == NULL) { @@ -1740,12 +1735,10 @@ methodcaller_reduce(methodcallerobject *mc, PyObject *Py_UNUSED(ignored)) newargs = PyTuple_New(1 + callargcount); if (newargs == NULL) return NULL; - Py_INCREF(mc->name); - PyTuple_SET_ITEM(newargs, 0, mc->name); + PyTuple_SET_ITEM(newargs, 0, Py_NewRef(mc->name)); for (i = 0; i < callargcount; ++i) { PyObject *arg = PyTuple_GET_ITEM(mc->args, i); - Py_INCREF(arg); - PyTuple_SET_ITEM(newargs, i + 1, arg); + PyTuple_SET_ITEM(newargs, i + 1, Py_NewRef(arg)); } return Py_BuildValue("ON", Py_TYPE(mc), newargs); } diff --git a/Modules/_pickle.c b/Modules/_pickle.c index 52704b0c59ade1..1b34977806b661 100644 --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -386,10 +386,9 @@ init_method_ref(PyObject *self, PyObject *name, if (PyMethod_Check(func) && PyMethod_GET_SELF(func) == self) { /* Deconstruct a bound Python method */ - func2 = PyMethod_GET_FUNCTION(func); - Py_INCREF(func2); *method_self = self; /* borrowed */ - Py_XSETREF(*method_func, func2); + func2 = PyMethod_GET_FUNCTION(func); + Py_XSETREF(*method_func, Py_NewRef(func2)); Py_DECREF(func); return 0; } @@ -408,8 +407,7 @@ reconstruct_method(PyObject *func, PyObject *self) return PyMethod_New(func, self); } else { - Py_INCREF(func); - return func; + return Py_NewRef(func); } } 
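Not part of the patch: in the clinic inputs above (Modules/_io/clinic/*.c.h, Modules/_multiprocessing/clinic/semaphore.c.h, and the corresponding .c files), bool(accept={int}) parameters become plain bool, so the regenerated parsers read flags with PyObject_IsTrue() instead of _PyLong_AsInt(); the hand-written parsers in _json.c and _lsprof.c make the matching switch from the "i" to the "p" format unit. A minimal sketch of the resulting pattern, with illustrative function and parameter names only:

    #include "Python.h"

    /* Flag taken as a single object, as in the regenerated clinic code:
       any object is collapsed to 0/1, and -1 signals an error. */
    static PyObject *
    demo_setflag(PyObject *module, PyObject *arg)
    {
        int flag = PyObject_IsTrue(arg);
        if (flag < 0) {
            return NULL;
        }
        return PyBool_FromLong(flag);
    }

    /* Flag parsed from an argument tuple, as in the hand-written parsers:
       the "p" (predicate) unit applies PyObject_IsTrue() internally. */
    static PyObject *
    demo_parse(PyObject *module, PyObject *args)
    {
        int flag = 1;  /* default when the optional argument is omitted */
        if (!PyArg_ParseTuple(args, "|p:demo_parse", &flag)) {
            return NULL;
        }
        return PyBool_FromLong(flag);
    }
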
@@ -907,8 +905,7 @@ PyMemoTable_Set(PyMemoTable *self, PyObject *key, Py_ssize_t value) entry->me_value = value; return 0; } - Py_INCREF(key); - entry->me_key = key; + entry->me_key = Py_NewRef(key); entry->me_value = value; self->mt_used++; @@ -1196,8 +1193,7 @@ _Pickler_SetBufferCallback(PicklerObject *self, PyObject *buffer_callback) return -1; } - Py_XINCREF(buffer_callback); - self->buffer_callback = buffer_callback; + self->buffer_callback = Py_XNewRef(buffer_callback); return 0; } @@ -1543,9 +1539,8 @@ _Unpickler_MemoPut(UnpicklerObject *self, size_t idx, PyObject *value) return -1; assert(idx < self->memo_size); } - Py_INCREF(value); old_item = self->memo[idx]; - self->memo[idx] = value; + self->memo[idx] = Py_NewRef(value); if (old_item != NULL) { Py_DECREF(old_item); } @@ -1834,8 +1829,7 @@ get_deep_attribute(PyObject *obj, PyObject *names, PyObject **pparent) n = PyList_GET_SIZE(names); for (i = 0; i < n; i++) { PyObject *name = PyList_GET_ITEM(names, i); - Py_XDECREF(parent); - parent = obj; + Py_XSETREF(parent, obj); (void)_PyObject_LookupAttr(parent, name, &obj); if (obj == NULL) { Py_DECREF(parent); @@ -1928,8 +1922,7 @@ whichmodule(PyObject *global, PyObject *dotted_path) i = 0; while (PyDict_Next(modules, &i, &module_name, &module)) { if (_checkmodule(module_name, module, global, dotted_path) == 0) { - Py_INCREF(module_name); - return module_name; + return Py_NewRef(module_name); } if (PyErr_Occurred()) { return NULL; @@ -1965,8 +1958,7 @@ whichmodule(PyObject *global, PyObject *dotted_path) /* If no module is found, use __main__. */ module_name = &_Py_ID(__main__); - Py_INCREF(module_name); - return module_name; + return Py_NewRef(module_name); } /* fast_save_enter() and fast_save_leave() are guards against recursive @@ -3557,10 +3549,8 @@ fix_imports(PyObject **module_name, PyObject **global_name) Py_CLEAR(*module_name); Py_CLEAR(*global_name); - Py_INCREF(fixed_module_name); - Py_INCREF(fixed_global_name); - *module_name = fixed_module_name; - *global_name = fixed_global_name; + *module_name = Py_NewRef(fixed_module_name); + *global_name = Py_NewRef(fixed_global_name); return 0; } else if (PyErr_Occurred()) { @@ -3576,8 +3566,7 @@ fix_imports(PyObject **module_name, PyObject **global_name) Py_TYPE(item)->tp_name); return -1; } - Py_INCREF(item); - Py_XSETREF(*module_name, item); + Py_XSETREF(*module_name, Py_NewRef(item)); } else if (PyErr_Occurred()) { return -1; @@ -3602,8 +3591,7 @@ save_global(PicklerObject *self, PyObject *obj, PyObject *name) const char global_op = GLOBAL; if (name) { - Py_INCREF(name); - global_name = name; + global_name = Py_NewRef(name); } else { if (_PyObject_LookupAttr(obj, &_Py_ID(__qualname__), &global_name) < 0) @@ -3637,8 +3625,8 @@ save_global(PicklerObject *self, PyObject *obj, PyObject *name) obj, module_name); goto error; } - lastname = PyList_GET_ITEM(dotted_path, PyList_GET_SIZE(dotted_path)-1); - Py_INCREF(lastname); + lastname = Py_NewRef(PyList_GET_ITEM(dotted_path, + PyList_GET_SIZE(dotted_path) - 1)); cls = get_deep_attribute(module, dotted_path, &parent); Py_CLEAR(dotted_path); if (cls == NULL) { @@ -3728,9 +3716,7 @@ save_global(PicklerObject *self, PyObject *obj, PyObject *name) else { gen_global: if (parent == module) { - Py_INCREF(lastname); - Py_DECREF(global_name); - global_name = lastname; + Py_SETREF(global_name, Py_NewRef(lastname)); } if (self->proto >= 4) { const char stack_global_op = STACK_GLOBAL; @@ -3932,8 +3918,7 @@ get_class(PyObject *obj) PyObject *cls; if (_PyObject_LookupAttr(obj, &_Py_ID(__class__), &cls) 
== 0) { - cls = (PyObject *) Py_TYPE(obj); - Py_INCREF(cls); + cls = Py_NewRef(Py_TYPE(obj)); } return cls; } @@ -4084,12 +4069,10 @@ save_reduce(PicklerObject *self, PyObject *args, PyObject *obj) return -1; } PyTuple_SET_ITEM(newargs, 0, cls_new); - Py_INCREF(cls); - PyTuple_SET_ITEM(newargs, 1, cls); + PyTuple_SET_ITEM(newargs, 1, Py_NewRef(cls)); for (i = 0; i < PyTuple_GET_SIZE(args); i++) { PyObject *item = PyTuple_GET_ITEM(args, i); - Py_INCREF(item); - PyTuple_SET_ITEM(newargs, i + 2, item); + PyTuple_SET_ITEM(newargs, i + 2, Py_NewRef(item)); } callable = PyObject_Call(st->partial, newargs, kwargs); @@ -4361,8 +4344,7 @@ save(PicklerObject *self, PyObject *obj, int pers_save) if (reduce_value != Py_NotImplemented) { goto reduce; } - Py_DECREF(reduce_value); - reduce_value = NULL; + Py_SETREF(reduce_value, NULL); } if (type == &PyType_Type) { @@ -4405,8 +4387,7 @@ save(PicklerObject *self, PyObject *obj, int pers_save) } } if (reduce_func != NULL) { - Py_INCREF(obj); - reduce_value = _Pickle_FastCall(reduce_func, obj); + reduce_value = _Pickle_FastCall(reduce_func, Py_NewRef(obj)); } else if (PyType_IsSubtype(type, &PyType_Type)) { status = save_global(self, obj, NULL); @@ -4594,26 +4575,25 @@ _pickle_Pickler_dump(PicklerObject *self, PyObject *obj) /*[clinic input] -_pickle.Pickler.__sizeof__ -> Py_ssize_t +_pickle.Pickler.__sizeof__ -> size_t Returns size in memory, in bytes. [clinic start generated code]*/ -static Py_ssize_t +static size_t _pickle_Pickler___sizeof___impl(PicklerObject *self) -/*[clinic end generated code: output=106edb3123f332e1 input=8cbbec9bd5540d42]*/ +/*[clinic end generated code: output=23ad75658d3b59ff input=d8127c8e7012ebd7]*/ { - Py_ssize_t res, s; - - res = _PyObject_SIZE(Py_TYPE(self)); + size_t res = _PyObject_SIZE(Py_TYPE(self)); if (self->memo != NULL) { res += sizeof(PyMemoTable); res += self->memo->mt_allocated * sizeof(PyMemoEntry); } if (self->output_buffer != NULL) { - s = _PySys_GetSizeOf(self->output_buffer); - if (s == -1) + size_t s = _PySys_GetSizeOf(self->output_buffer); + if (s == (size_t)-1) { return -1; + } res += s; } return res; @@ -4869,8 +4849,7 @@ _pickle_PicklerMemoProxy___reduce___impl(PicklerMemoProxyObject *self) return NULL; } PyTuple_SET_ITEM(dict_args, 0, contents); - Py_INCREF((PyObject *)&PyDict_Type); - PyTuple_SET_ITEM(reduce_value, 0, (PyObject *)&PyDict_Type); + PyTuple_SET_ITEM(reduce_value, 0, Py_NewRef(&PyDict_Type)); PyTuple_SET_ITEM(reduce_value, 1, dict_args); return reduce_value; } @@ -4944,8 +4923,7 @@ PicklerMemoProxy_New(PicklerObject *pickler) self = PyObject_GC_New(PicklerMemoProxyObject, &PicklerMemoProxyType); if (self == NULL) return NULL; - Py_INCREF(pickler); - self->pickler = pickler; + self->pickler = (PicklerObject*)Py_NewRef(pickler); PyObject_GC_Track(self); return (PyObject *)self; } @@ -5045,8 +5023,7 @@ Pickler_set_persid(PicklerObject *self, PyObject *value, void *Py_UNUSED(ignored } self->pers_func_self = NULL; - Py_INCREF(value); - Py_XSETREF(self->pers_func, value); + Py_XSETREF(self->pers_func, Py_NewRef(value)); return 0; } @@ -6073,7 +6050,7 @@ load_persid(UnpicklerObject *self) else { PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->UnpicklingError, - "A load persistent id instruction was encountered,\n" + "A load persistent id instruction was encountered, " "but no persistent_load function was specified."); return -1; } @@ -6100,7 +6077,7 @@ load_binpersid(UnpicklerObject *self) else { PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->UnpicklingError, - "A 
load persistent id instruction was encountered,\n" + "A load persistent id instruction was encountered, " "but no persistent_load function was specified."); return -1; } @@ -7101,22 +7078,20 @@ _pickle_Unpickler_find_class_impl(UnpicklerObject *self, /*[clinic input] -_pickle.Unpickler.__sizeof__ -> Py_ssize_t +_pickle.Unpickler.__sizeof__ -> size_t Returns size in memory, in bytes. [clinic start generated code]*/ -static Py_ssize_t +static size_t _pickle_Unpickler___sizeof___impl(UnpicklerObject *self) -/*[clinic end generated code: output=119d9d03ad4c7651 input=13333471fdeedf5e]*/ +/*[clinic end generated code: output=4648d84c228196df input=27180b2b6b524012]*/ { - Py_ssize_t res; - - res = _PyObject_SIZE(Py_TYPE(self)); + size_t res = _PyObject_SIZE(Py_TYPE(self)); if (self->memo != NULL) res += self->memo_size * sizeof(PyObject *); if (self->marks != NULL) - res += self->marks_size * sizeof(Py_ssize_t); + res += (size_t)self->marks_size * sizeof(Py_ssize_t); if (self->input_line != NULL) res += strlen(self->input_line) + 1; if (self->encoding != NULL) @@ -7370,8 +7345,7 @@ _pickle_UnpicklerMemoProxy___reduce___impl(UnpicklerMemoProxyObject *self) return NULL; } PyTuple_SET_ITEM(constructor_args, 0, contents); - Py_INCREF((PyObject *)&PyDict_Type); - PyTuple_SET_ITEM(reduce_value, 0, (PyObject *)&PyDict_Type); + PyTuple_SET_ITEM(reduce_value, 0, Py_NewRef(&PyDict_Type)); PyTuple_SET_ITEM(reduce_value, 1, constructor_args); return reduce_value; } @@ -7446,8 +7420,7 @@ UnpicklerMemoProxy_New(UnpicklerObject *unpickler) &UnpicklerMemoProxyType); if (self == NULL) return NULL; - Py_INCREF(unpickler); - self->unpickler = unpickler; + self->unpickler = (UnpicklerObject*)Py_NewRef(unpickler); PyObject_GC_Track(self); return (PyObject *)self; } @@ -7483,8 +7456,7 @@ Unpickler_set_memo(UnpicklerObject *self, PyObject *obj, void *Py_UNUSED(ignored return -1; for (size_t i = 0; i < new_memo_size; i++) { - Py_XINCREF(unpickler->memo[i]); - new_memo[i] = unpickler->memo[i]; + new_memo[i] = Py_XNewRef(unpickler->memo[i]); } } else if (PyDict_Check(obj)) { @@ -7564,8 +7536,7 @@ Unpickler_set_persload(UnpicklerObject *self, PyObject *value, void *Py_UNUSED(i } self->pers_func_self = NULL; - Py_INCREF(value); - Py_XSETREF(self->pers_func, value); + Py_XSETREF(self->pers_func, Py_NewRef(value)); return 0; } @@ -7944,8 +7915,7 @@ PyInit__pickle(void) m = PyState_FindModule(&_picklemodule); if (m) { - Py_INCREF(m); - return m; + return Py_NewRef(m); } if (PyType_Ready(&Pdata_Type) < 0) @@ -7986,16 +7956,15 @@ PyInit__pickle(void) if (st->UnpicklingError == NULL) return NULL; - Py_INCREF(st->PickleError); - if (PyModule_AddObject(m, "PickleError", st->PickleError) < 0) + if (PyModule_AddObjectRef(m, "PickleError", st->PickleError) < 0) { return NULL; - Py_INCREF(st->PicklingError); - if (PyModule_AddObject(m, "PicklingError", st->PicklingError) < 0) + } + if (PyModule_AddObjectRef(m, "PicklingError", st->PicklingError) < 0) { return NULL; - Py_INCREF(st->UnpicklingError); - if (PyModule_AddObject(m, "UnpicklingError", st->UnpicklingError) < 0) + } + if (PyModule_AddObjectRef(m, "UnpicklingError", st->UnpicklingError) < 0) { return NULL; - + } if (_Pickle_InitState(st) < 0) return NULL; diff --git a/Modules/_posixsubprocess.c b/Modules/_posixsubprocess.c index 717b1cf2202105..b7563ee8250a94 100644 --- a/Modules/_posixsubprocess.c +++ b/Modules/_posixsubprocess.c @@ -814,7 +814,7 @@ subprocess_fork_exec(PyObject *module, PyObject *args) int allow_vfork; if (!PyArg_ParseTuple( - args, "OOpO!OOiiiiiiiiii" 
_Py_PARSE_PID "OOOiOp:fork_exec", + args, "OOpO!OOiiiiiiiipp" _Py_PARSE_PID "OOOiOp:fork_exec", &process_args, &executable_list, &close_fds, &PyTuple_Type, &py_fds_to_keep, &cwd_obj, &env_list, diff --git a/Modules/_scproxy.c b/Modules/_scproxy.c index 4c1f1aa300c717..344b66f9aad522 100644 --- a/Modules/_scproxy.c +++ b/Modules/_scproxy.c @@ -84,7 +84,7 @@ get_proxy_settings(PyObject* Py_UNUSED(mod), PyObject *Py_UNUSED(ignored)) if (v == NULL) goto error; r = PyDict_SetItemString(result, "exclude_simple", v); - Py_DECREF(v); v = NULL; + Py_SETREF(v, NULL); if (r == -1) goto error; anArray = CFDictionaryGetValue(proxyDict, diff --git a/Modules/_sqlite/clinic/connection.c.h b/Modules/_sqlite/clinic/connection.c.h index e7e78707ee8d5d..4c3fd1bd27411b 100644 --- a/Modules/_sqlite/clinic/connection.c.h +++ b/Modules/_sqlite/clinic/connection.c.h @@ -13,7 +13,8 @@ pysqlite_connection_init_impl(pysqlite_Connection *self, PyObject *database, double timeout, int detect_types, const char *isolation_level, int check_same_thread, PyObject *factory, - int cache_size, int uri); + int cache_size, int uri, + enum autocommit_mode autocommit); static int pysqlite_connection_init(PyObject *self, PyObject *args, PyObject *kwargs) @@ -21,14 +22,14 @@ pysqlite_connection_init(PyObject *self, PyObject *args, PyObject *kwargs) int return_value = -1; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) - #define NUM_KEYWORDS 8 + #define NUM_KEYWORDS 9 static struct { PyGC_Head _this_is_not_used; PyObject_VAR_HEAD PyObject *ob_item[NUM_KEYWORDS]; } _kwtuple = { .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) - .ob_item = { &_Py_ID(database), &_Py_ID(timeout), &_Py_ID(detect_types), &_Py_ID(isolation_level), &_Py_ID(check_same_thread), &_Py_ID(factory), &_Py_ID(cached_statements), &_Py_ID(uri), }, + .ob_item = { &_Py_ID(database), &_Py_ID(timeout), &_Py_ID(detect_types), &_Py_ID(isolation_level), &_Py_ID(check_same_thread), &_Py_ID(factory), &_Py_ID(cached_statements), &_Py_ID(uri), &_Py_ID(autocommit), }, }; #undef NUM_KEYWORDS #define KWTUPLE (&_kwtuple.ob_base.ob_base) @@ -37,14 +38,14 @@ pysqlite_connection_init(PyObject *self, PyObject *args, PyObject *kwargs) # define KWTUPLE NULL #endif // !Py_BUILD_CORE - static const char * const _keywords[] = {"database", "timeout", "detect_types", "isolation_level", "check_same_thread", "factory", "cached_statements", "uri", NULL}; + static const char * const _keywords[] = {"database", "timeout", "detect_types", "isolation_level", "check_same_thread", "factory", "cached_statements", "uri", "autocommit", NULL}; static _PyArg_Parser _parser = { .keywords = _keywords, .fname = "Connection", .kwtuple = KWTUPLE, }; #undef KWTUPLE - PyObject *argsbuf[8]; + PyObject *argsbuf[9]; PyObject * const *fastargs; Py_ssize_t nargs = PyTuple_GET_SIZE(args); Py_ssize_t noptargs = nargs + (kwargs ? 
PyDict_GET_SIZE(kwargs) : 0) - 1; @@ -56,6 +57,7 @@ pysqlite_connection_init(PyObject *self, PyObject *args, PyObject *kwargs) PyObject *factory = (PyObject*)clinic_state()->ConnectionType; int cache_size = 128; int uri = 0; + enum autocommit_mode autocommit = LEGACY_TRANSACTION_CONTROL; fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, 1, 8, 0, argsbuf); if (!fastargs) { @@ -98,8 +100,8 @@ pysqlite_connection_init(PyObject *self, PyObject *args, PyObject *kwargs) } } if (fastargs[4]) { - check_same_thread = _PyLong_AsInt(fastargs[4]); - if (check_same_thread == -1 && PyErr_Occurred()) { + check_same_thread = PyObject_IsTrue(fastargs[4]); + if (check_same_thread < 0) { goto exit; } if (!--noptargs) { @@ -121,12 +123,24 @@ pysqlite_connection_init(PyObject *self, PyObject *args, PyObject *kwargs) goto skip_optional_pos; } } - uri = PyObject_IsTrue(fastargs[7]); - if (uri < 0) { - goto exit; + if (fastargs[7]) { + uri = PyObject_IsTrue(fastargs[7]); + if (uri < 0) { + goto exit; + } + if (!--noptargs) { + goto skip_optional_pos; + } } skip_optional_pos: - return_value = pysqlite_connection_init_impl((pysqlite_Connection *)self, database, timeout, detect_types, isolation_level, check_same_thread, factory, cache_size, uri); + if (!noptargs) { + goto skip_optional_kwonly; + } + if (!autocommit_converter(fastargs[8], &autocommit)) { + goto exit; + } +skip_optional_kwonly: + return_value = pysqlite_connection_init_impl((pysqlite_Connection *)self, database, timeout, detect_types, isolation_level, check_same_thread, factory, cache_size, uri, autocommit); exit: return return_value; @@ -291,8 +305,8 @@ blobopen(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyO goto skip_optional_kwonly; } if (args[3]) { - readonly = _PyLong_AsInt(args[3]); - if (readonly == -1 && PyErr_Occurred()) { + readonly = PyObject_IsTrue(args[3]); + if (readonly < 0) { goto exit; } if (!--noptargs) { @@ -817,8 +831,8 @@ pysqlite_connection_enable_load_extension(pysqlite_Connection *self, PyObject *a PyObject *return_value = NULL; int onoff; - onoff = _PyLong_AsInt(arg); - if (onoff == -1 && PyErr_Occurred()) { + onoff = PyObject_IsTrue(arg); + if (onoff < 0) { goto exit; } return_value = pysqlite_connection_enable_load_extension_impl(self, onoff); @@ -1518,4 +1532,4 @@ getlimit(pysqlite_Connection *self, PyObject *arg) #ifndef DESERIALIZE_METHODDEF #define DESERIALIZE_METHODDEF #endif /* !defined(DESERIALIZE_METHODDEF) */ -/*[clinic end generated code: output=beef3eac690a1f88 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=f10306e10427488b input=a9049054013a1b77]*/ diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c index ceb77bbf8420ff..4c07d5e0b61f8c 100644 --- a/Modules/_sqlite/connection.c +++ b/Modules/_sqlite/connection.c @@ -92,6 +92,30 @@ isolation_level_converter(PyObject *str_or_none, const char **result) return 1; } +static int +autocommit_converter(PyObject *val, enum autocommit_mode *result) +{ + if (Py_IsTrue(val)) { + *result = AUTOCOMMIT_ENABLED; + return 1; + } + if (Py_IsFalse(val)) { + *result = AUTOCOMMIT_DISABLED; + return 1; + } + if (PyLong_Check(val) && + PyLong_AsLong(val) == LEGACY_TRANSACTION_CONTROL) + { + *result = AUTOCOMMIT_LEGACY; + return 1; + } + + PyErr_SetString(PyExc_ValueError, + "autocommit must be True, False, or " + "sqlite3.LEGACY_TRANSACTION_CONTROL"); + return 0; +} + #define clinic_state() (pysqlite_get_state_by_type(Py_TYPE(self))) #include "clinic/connection.c.h" #undef 
clinic_state @@ -132,13 +156,38 @@ new_statement_cache(pysqlite_Connection *self, pysqlite_state *state, return res; } +static inline int +connection_exec_stmt(pysqlite_Connection *self, const char *sql) +{ + int rc; + Py_BEGIN_ALLOW_THREADS + int len = (int)strlen(sql) + 1; + sqlite3_stmt *stmt; + rc = sqlite3_prepare_v2(self->db, sql, len, &stmt, NULL); + if (rc == SQLITE_OK) { + (void)sqlite3_step(stmt); + rc = sqlite3_finalize(stmt); + } + Py_END_ALLOW_THREADS + + if (rc != SQLITE_OK) { + (void)_pysqlite_seterror(self->state, self->db); + return -1; + } + return 0; +} + /*[python input] class IsolationLevel_converter(CConverter): type = "const char *" converter = "isolation_level_converter" +class Autocommit_converter(CConverter): + type = "enum autocommit_mode" + converter = "autocommit_converter" + [python start generated code]*/ -/*[python end generated code: output=da39a3ee5e6b4b0d input=cbcfe85b253061c2]*/ +/*[python end generated code: output=da39a3ee5e6b4b0d input=bc2aa6c7ba0c5f8f]*/ // NB: This needs to be in sync with the sqlite3.connect docstring /*[clinic input] @@ -148,10 +197,12 @@ _sqlite3.Connection.__init__ as pysqlite_connection_init timeout: double = 5.0 detect_types: int = 0 isolation_level: IsolationLevel = "" - check_same_thread: bool(accept={int}) = True + check_same_thread: bool = True factory: object(c_default='(PyObject*)clinic_state()->ConnectionType') = ConnectionType cached_statements as cache_size: int = 128 uri: bool = False + * + autocommit: Autocommit(c_default='LEGACY_TRANSACTION_CONTROL') = sqlite3.LEGACY_TRANSACTION_CONTROL [clinic start generated code]*/ static int @@ -159,8 +210,9 @@ pysqlite_connection_init_impl(pysqlite_Connection *self, PyObject *database, double timeout, int detect_types, const char *isolation_level, int check_same_thread, PyObject *factory, - int cache_size, int uri) -/*[clinic end generated code: output=839eb2fee4293bda input=b8ce63dc6f70a383]*/ + int cache_size, int uri, + enum autocommit_mode autocommit) +/*[clinic end generated code: output=cba057313ea7712f input=9b0ab6c12f674fa3]*/ { if (PySys_Audit("sqlite3.connect", "O", database) < 0) { return -1; @@ -227,6 +279,7 @@ pysqlite_connection_init_impl(pysqlite_Connection *self, PyObject *database, self->state = state; self->detect_types = detect_types; self->isolation_level = isolation_level; + self->autocommit = autocommit; self->check_same_thread = check_same_thread; self->thread_ident = PyThread_get_thread_ident(); self->statement_cache = statement_cache; @@ -256,6 +309,10 @@ pysqlite_connection_init_impl(pysqlite_Connection *self, PyObject *database, } self->initialized = 1; + + if (autocommit == AUTOCOMMIT_DISABLED) { + (void)connection_exec_stmt(self, "BEGIN"); + } return 0; error: @@ -321,10 +378,33 @@ free_callback_contexts(pysqlite_Connection *self) set_callback_context(&self->authorizer_ctx, NULL); } +static void +remove_callbacks(sqlite3 *db) +{ +#ifdef HAVE_TRACE_V2 + sqlite3_trace_v2(db, SQLITE_TRACE_STMT, 0, 0); +#else + sqlite3_trace(db, 0, (void*)0); +#endif + sqlite3_progress_handler(db, 0, 0, (void *)0); + (void)sqlite3_set_authorizer(db, NULL, NULL); +} + static void connection_close(pysqlite_Connection *self) { if (self->db) { + if (self->autocommit == AUTOCOMMIT_DISABLED && + !sqlite3_get_autocommit(self->db)) + { + /* If close is implicitly called as a result of interpreter + * tear-down, we must not call back into Python. 
*/ + if (_Py_IsFinalizing()) { + remove_callbacks(self->db); + } + (void)connection_exec_stmt(self, "ROLLBACK"); + } + free_callback_contexts(self); sqlite3 *db = self->db; @@ -405,7 +485,7 @@ _sqlite3.Connection.blobopen as blobopen Row index. / * - readonly: bool(accept={int}) = False + readonly: bool = False Open the BLOB without write permissions. name: str = "main" Database name. @@ -416,7 +496,7 @@ Open and return a BLOB object. static PyObject * blobopen_impl(pysqlite_Connection *self, const char *table, const char *col, int row, int readonly, const char *name) -/*[clinic end generated code: output=0c8e2e58516d0b5c input=1e7052516acfc94d]*/ +/*[clinic end generated code: output=0c8e2e58516d0b5c input=fa73c83aa7a7ddee]*/ { if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) { return NULL; @@ -538,24 +618,21 @@ pysqlite_connection_commit_impl(pysqlite_Connection *self) return NULL; } - if (!sqlite3_get_autocommit(self->db)) { - int rc; - - Py_BEGIN_ALLOW_THREADS - sqlite3_stmt *statement; - rc = sqlite3_prepare_v2(self->db, "COMMIT", 7, &statement, NULL); - if (rc == SQLITE_OK) { - (void)sqlite3_step(statement); - rc = sqlite3_finalize(statement); + if (self->autocommit == AUTOCOMMIT_LEGACY) { + if (!sqlite3_get_autocommit(self->db)) { + if (connection_exec_stmt(self, "COMMIT") < 0) { + return NULL; + } } - Py_END_ALLOW_THREADS - - if (rc != SQLITE_OK) { - (void)_pysqlite_seterror(self->state, self->db); + } + else if (self->autocommit == AUTOCOMMIT_DISABLED) { + if (connection_exec_stmt(self, "COMMIT") < 0) { + return NULL; + } + if (connection_exec_stmt(self, "BEGIN") < 0) { return NULL; } } - Py_RETURN_NONE; } @@ -575,25 +652,21 @@ pysqlite_connection_rollback_impl(pysqlite_Connection *self) return NULL; } - if (!sqlite3_get_autocommit(self->db)) { - int rc; - - Py_BEGIN_ALLOW_THREADS - sqlite3_stmt *statement; - rc = sqlite3_prepare_v2(self->db, "ROLLBACK", 9, &statement, NULL); - if (rc == SQLITE_OK) { - (void)sqlite3_step(statement); - rc = sqlite3_finalize(statement); + if (self->autocommit == AUTOCOMMIT_LEGACY) { + if (!sqlite3_get_autocommit(self->db)) { + if (connection_exec_stmt(self, "ROLLBACK") < 0) { + return NULL; + } } - Py_END_ALLOW_THREADS - - if (rc != SQLITE_OK) { - (void)_pysqlite_seterror(self->state, self->db); + } + else if (self->autocommit == AUTOCOMMIT_DISABLED) { + if (connection_exec_stmt(self, "ROLLBACK") < 0) { + return NULL; + } + if (connection_exec_stmt(self, "BEGIN") < 0) { return NULL; } - } - Py_RETURN_NONE; } @@ -1491,7 +1564,7 @@ pysqlite_connection_set_trace_callback_impl(pysqlite_Connection *self, /*[clinic input] _sqlite3.Connection.enable_load_extension as pysqlite_connection_enable_load_extension - enable as onoff: bool(accept={int}) + enable as onoff: bool / Enable dynamic loading of SQLite extension modules. @@ -1500,7 +1573,7 @@ Enable dynamic loading of SQLite extension modules. 
static PyObject * pysqlite_connection_enable_load_extension_impl(pysqlite_Connection *self, int onoff) -/*[clinic end generated code: output=9cac37190d388baf input=5f00e93f7a9d3540]*/ +/*[clinic end generated code: output=9cac37190d388baf input=2a1e87931486380f]*/ { int rc; @@ -2270,6 +2343,48 @@ getlimit_impl(pysqlite_Connection *self, int category) } +static PyObject * +get_autocommit(pysqlite_Connection *self, void *Py_UNUSED(ctx)) +{ + if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) { + return NULL; + } + if (self->autocommit == AUTOCOMMIT_ENABLED) { + Py_RETURN_TRUE; + } + if (self->autocommit == AUTOCOMMIT_DISABLED) { + Py_RETURN_FALSE; + } + return PyLong_FromLong(LEGACY_TRANSACTION_CONTROL); +} + +static int +set_autocommit(pysqlite_Connection *self, PyObject *val, void *Py_UNUSED(ctx)) +{ + if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) { + return -1; + } + if (!autocommit_converter(val, &self->autocommit)) { + return -1; + } + if (self->autocommit == AUTOCOMMIT_ENABLED) { + if (!sqlite3_get_autocommit(self->db)) { + if (connection_exec_stmt(self, "COMMIT") < 0) { + return -1; + } + } + } + else if (self->autocommit == AUTOCOMMIT_DISABLED) { + if (sqlite3_get_autocommit(self->db)) { + if (connection_exec_stmt(self, "BEGIN") < 0) { + return -1; + } + } + } + return 0; +} + + static const char connection_doc[] = PyDoc_STR("SQLite database connection object."); @@ -2277,6 +2392,7 @@ static PyGetSetDef connection_getset[] = { {"isolation_level", (getter)pysqlite_connection_get_isolation_level, (setter)pysqlite_connection_set_isolation_level}, {"total_changes", (getter)pysqlite_connection_get_total_changes, (setter)0}, {"in_transaction", (getter)pysqlite_connection_get_in_transaction, (setter)0}, + {"autocommit", (getter)get_autocommit, (setter)set_autocommit}, {NULL} }; diff --git a/Modules/_sqlite/connection.h b/Modules/_sqlite/connection.h index 629fe3d3a95a11..1df92065a587a2 100644 --- a/Modules/_sqlite/connection.h +++ b/Modules/_sqlite/connection.h @@ -39,6 +39,12 @@ typedef struct _callback_context pysqlite_state *state; } callback_context; +enum autocommit_mode { + AUTOCOMMIT_LEGACY = LEGACY_TRANSACTION_CONTROL, + AUTOCOMMIT_ENABLED = 1, + AUTOCOMMIT_DISABLED = 0, +}; + typedef struct { PyObject_HEAD @@ -51,6 +57,7 @@ typedef struct /* NULL for autocommit, otherwise a string with the isolation level */ const char *isolation_level; + enum autocommit_mode autocommit; /* 1 if a check should be performed for each API call if the connection is * used from the same thread it was created in */ diff --git a/Modules/_sqlite/cursor.c b/Modules/_sqlite/cursor.c index cbf4718365fee6..a4e22bb4a2b58d 100644 --- a/Modules/_sqlite/cursor.c +++ b/Modules/_sqlite/cursor.c @@ -855,7 +855,8 @@ _pysqlite_query_execute(pysqlite_Cursor* self, int multiple, PyObject* operation /* We start a transaction implicitly before a DML statement. SELECT is the only exception. See #9924. 
*/ - if (self->connection->isolation_level + if (self->connection->autocommit == AUTOCOMMIT_LEGACY + && self->connection->isolation_level && self->statement->is_dml && sqlite3_get_autocommit(self->connection->db)) { @@ -1033,7 +1034,9 @@ pysqlite_cursor_executescript_impl(pysqlite_Cursor *self, // Commit if needed sqlite3 *db = self->connection->db; - if (!sqlite3_get_autocommit(db)) { + if (self->connection->autocommit == AUTOCOMMIT_LEGACY + && !sqlite3_get_autocommit(db)) + { int rc = SQLITE_OK; Py_BEGIN_ALLOW_THREADS @@ -1120,8 +1123,7 @@ pysqlite_cursor_iternext(pysqlite_Cursor *self) PyObject *factory = self->row_factory; PyObject *args[] = { (PyObject *)self, row, }; PyObject *new_row = PyObject_Vectorcall(factory, args, 2, NULL); - Py_DECREF(row); - row = new_row; + Py_SETREF(row, new_row); } return row; } diff --git a/Modules/_sqlite/module.c b/Modules/_sqlite/module.c index 707aae160ab4f3..6db3d51fd20220 100644 --- a/Modules/_sqlite/module.c +++ b/Modules/_sqlite/module.c @@ -46,7 +46,8 @@ module _sqlite3 PyDoc_STRVAR(module_connect_doc, "connect($module, /, database, timeout=5.0, detect_types=0,\n" " isolation_level='', check_same_thread=True,\n" -" factory=ConnectionType, cached_statements=128, uri=False)\n" +" factory=ConnectionType, cached_statements=128, uri=False, *,\n" +" autocommit=sqlite3.LEGACY_TRANSACTION_CONTROL)\n" "--\n" "\n" "Opens a connection to the SQLite database file database.\n" @@ -706,6 +707,10 @@ module_exec(PyObject *module) goto error; } + if (PyModule_AddIntMacro(module, LEGACY_TRANSACTION_CONTROL) < 0) { + goto error; + } + int threadsafety = get_threadsafety(state); if (threadsafety < 0) { goto error; diff --git a/Modules/_sqlite/module.h b/Modules/_sqlite/module.h index 7deba22ffec1b6..daa22091d38ad7 100644 --- a/Modules/_sqlite/module.h +++ b/Modules/_sqlite/module.h @@ -26,6 +26,8 @@ #define PY_SSIZE_T_CLEAN #include "Python.h" +#define LEGACY_TRANSACTION_CONTROL -1 + #define PYSQLITE_VERSION "2.6.0" #define MODULE_NAME "sqlite3" diff --git a/Modules/_sre/sre.c b/Modules/_sre/sre.c index aae02652664f72..4b6290a5967932 100644 --- a/Modules/_sre/sre.c +++ b/Modules/_sre/sre.c @@ -459,8 +459,7 @@ state_init(SRE_STATE* state, PatternObject* pattern, PyObject* string, state->start = (void*) ((char*) ptr + start * state->charsize); state->end = (void*) ((char*) ptr + end * state->charsize); - Py_INCREF(string); - state->string = string; + state->string = Py_NewRef(string); state->pos = start; state->endpos = end; @@ -499,8 +498,7 @@ getslice(int isbytes, const void *ptr, if (isbytes) { if (PyBytes_CheckExact(string) && start == 0 && end == PyBytes_GET_SIZE(string)) { - Py_INCREF(string); - return string; + return Py_NewRef(string); } return PyBytes_FromStringAndSize( (const char *)ptr + start, end - start); @@ -1089,8 +1087,7 @@ pattern_subx(_sremodulestate* module_state, if (PyCallable_Check(ptemplate)) { /* sub/subn takes either a function or a template */ - filter = ptemplate; - Py_INCREF(filter); + filter = Py_NewRef(ptemplate); filter_type = CALLABLE; } else { /* if not callable, check if it's a literal string */ @@ -1109,8 +1106,7 @@ pattern_subx(_sremodulestate* module_state, if (view.buf) PyBuffer_Release(&view); if (literal) { - filter = ptemplate; - Py_INCREF(filter); + filter = Py_NewRef(ptemplate); filter_type = LITERAL; } else { /* not a literal; hand it over to the template compiler */ @@ -1120,8 +1116,8 @@ pattern_subx(_sremodulestate* module_state, assert(Py_TYPE(filter) == module_state->Template_Type); if (Py_SIZE(filter) == 0) { - 
Py_INCREF(((TemplateObject *)filter)->literal); - Py_SETREF(filter, ((TemplateObject *)filter)->literal); + Py_SETREF(filter, + Py_NewRef(((TemplateObject *)filter)->literal)); filter_type = LITERAL; } else { @@ -1195,8 +1191,7 @@ pattern_subx(_sremodulestate* module_state, goto error; } else { /* filter is literal string */ - item = filter; - Py_INCREF(item); + item = Py_NewRef(filter); } /* add to list */ @@ -1317,8 +1312,7 @@ static PyObject * _sre_SRE_Pattern___copy___impl(PatternObject *self) /*[clinic end generated code: output=85dedc2db1bd8694 input=a730a59d863bc9f5]*/ { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } /*[clinic input] @@ -1333,8 +1327,7 @@ static PyObject * _sre_SRE_Pattern___deepcopy__(PatternObject *self, PyObject *memo) /*[clinic end generated code: output=2ad25679c1f1204a input=a465b1602f997bed]*/ { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } static PyObject * @@ -1500,19 +1493,16 @@ _sre_compile_impl(PyObject *module, PyObject *pattern, int flags, PyBuffer_Release(&view); } - Py_INCREF(pattern); - self->pattern = pattern; + self->pattern = Py_NewRef(pattern); self->flags = flags; self->groups = groups; if (PyDict_GET_SIZE(groupindex) > 0) { - Py_INCREF(groupindex); - self->groupindex = groupindex; + self->groupindex = Py_NewRef(groupindex); if (PyTuple_GET_SIZE(indexgroup) > 0) { - Py_INCREF(indexgroup); - self->indexgroup = indexgroup; + self->indexgroup = Py_NewRef(indexgroup); } } @@ -1555,8 +1545,7 @@ _sre_template_impl(PyObject *module, PyObject *pattern, PyObject *template) if (!self) return NULL; self->chunks = 1 + 2*n; - self->literal = PyList_GET_ITEM(template, 0); - Py_INCREF(self->literal); + self->literal = Py_NewRef(PyList_GET_ITEM(template, 0)); for (Py_ssize_t i = 0; i < n; i++) { Py_ssize_t index = PyLong_AsSsize_t(PyList_GET_ITEM(template, 2*i+1)); if (index == -1 && PyErr_Occurred()) { @@ -1576,8 +1565,7 @@ _sre_template_impl(PyObject *module, PyObject *pattern, PyObject *template) literal = NULL; self->chunks--; } - Py_XINCREF(literal); - self->items[i].literal = literal; + self->items[i].literal = Py_XNewRef(literal); } return (PyObject*) self; @@ -1623,7 +1611,7 @@ _sre_template_impl(PyObject *module, PyObject *pattern, PyObject *template) #endif /* Report failure */ -#define FAIL do { VTRACE(("FAIL: %d\n", __LINE__)); return 0; } while (0) +#define FAIL do { VTRACE(("FAIL: %d\n", __LINE__)); return -1; } while (0) /* Extract opcode, argument, or skip count from code array */ #define GET_OP \ @@ -1647,7 +1635,7 @@ _sre_template_impl(PyObject *module, PyObject *pattern, PyObject *template) skip = *code; \ VTRACE(("%lu (skip to %p)\n", \ (unsigned long)skip, code+skip)); \ - if (skip-adj > (uintptr_t)(end - code)) \ + if (skip-adj > (uintptr_t)(end - code)) \ FAIL; \ code++; \ } while (0) @@ -1736,9 +1724,10 @@ _validate_charset(SRE_CODE *code, SRE_CODE *end) } } - return 1; + return 0; } +/* Returns 0 on success, -1 on failure, and 1 if the last op is JUMP. 
*/ static int _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) { @@ -1816,7 +1805,7 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) case SRE_OP_IN_LOC_IGNORE: GET_SKIP; /* Stop 1 before the end; we check the FAILURE below */ - if (!_validate_charset(code, code+skip-2)) + if (_validate_charset(code, code+skip-2)) FAIL; if (code[skip-2] != SRE_OP_FAILURE) FAIL; @@ -1870,7 +1859,7 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) } /* Validate the charset */ if (flags & SRE_INFO_CHARSET) { - if (!_validate_charset(code, newcode-1)) + if (_validate_charset(code, newcode-1)) FAIL; if (newcode[-1] != SRE_OP_FAILURE) FAIL; @@ -1891,7 +1880,7 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) if (skip == 0) break; /* Stop 2 before the end; we check the JUMP below */ - if (!_validate_inner(code, code+skip-3, groups)) + if (_validate_inner(code, code+skip-3, groups)) FAIL; code += skip-3; /* Check that it ends with a JUMP, and that each JUMP @@ -1905,6 +1894,8 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) else if (code+skip-1 != target) FAIL; } + if (code != target) + FAIL; } break; @@ -1920,7 +1911,7 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) FAIL; if (max > SRE_MAXREPEAT) FAIL; - if (!_validate_inner(code, code+skip-4, groups)) + if (_validate_inner(code, code+skip-4, groups)) FAIL; code += skip-4; GET_OP; @@ -1940,7 +1931,7 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) FAIL; if (max > SRE_MAXREPEAT) FAIL; - if (!_validate_inner(code, code+skip-3, groups)) + if (_validate_inner(code, code+skip-3, groups)) FAIL; code += skip-3; GET_OP; @@ -1958,7 +1949,7 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) case SRE_OP_ATOMIC_GROUP: { GET_SKIP; - if (!_validate_inner(code, code+skip-2, groups)) + if (_validate_inner(code, code+skip-2, groups)) FAIL; code += skip-2; GET_OP; @@ -2010,24 +2001,17 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) to allow arbitrary jumps anywhere in the code; so we just look for a JUMP opcode preceding our skip target. 
*/ - if (skip >= 3 && skip-3 < (uintptr_t)(end - code) && - code[skip-3] == SRE_OP_JUMP) - { - VTRACE(("both then and else parts present\n")); - if (!_validate_inner(code+1, code+skip-3, groups)) - FAIL; + VTRACE(("then part:\n")); + int rc = _validate_inner(code+1, code+skip-1, groups); + if (rc == 1) { + VTRACE(("else part:\n")); code += skip-2; /* Position after JUMP, at <skipno> */ GET_SKIP; - if (!_validate_inner(code, code+skip-1, groups)) - FAIL; - code += skip-1; - } - else { - VTRACE(("only a then part present\n")); - if (!_validate_inner(code+1, code+skip-1, groups)) - FAIL; - code += skip-1; + rc = _validate_inner(code, code+skip-1, groups); } + if (rc) + FAIL; + code += skip-1; break; case SRE_OP_ASSERT: @@ -2038,7 +2022,7 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) if (arg & 0x80000000) FAIL; /* Width too large */ /* Stop 1 before the end; we check the SUCCESS below */ - if (!_validate_inner(code+1, code+skip-2, groups)) + if (_validate_inner(code+1, code+skip-2, groups)) FAIL; code += skip-2; GET_OP; @@ -2046,6 +2030,12 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) FAIL; break; + case SRE_OP_JUMP: + if (code + 1 != end) + FAIL; + VTRACE(("JUMP: %d\n", __LINE__)); + return 1; + default: FAIL; @@ -2053,7 +2043,7 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) } VTRACE(("okay\n")); - return 1; + return 0; } static int @@ -2068,7 +2058,7 @@ _validate_outer(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) static int _validate(PatternObject *self) { - if (!_validate_outer(self->code, self->code+self->codesize, self->groups)) + if (_validate_outer(self->code, self->code+self->codesize, self->groups)) { PyErr_SetString(PyExc_RuntimeError, "invalid SRE code"); return 0; @@ -2126,8 +2116,7 @@ match_getslice_by_index(MatchObject* self, Py_ssize_t index, PyObject* def) if (self->string == Py_None || self->mark[index] < 0) { /* return default value if the string or group is undefined */ - Py_INCREF(def); - return def; + return Py_NewRef(def); } ptr = getstring(self->string, &length, &isbytes, &charsize, &view); @@ -2446,8 +2435,7 @@ match_regs(MatchObject* self) PyTuple_SET_ITEM(regs, index, item); } - Py_INCREF(regs); - self->regs = regs; + self->regs = Py_NewRef(regs); return regs; } @@ -2461,8 +2449,7 @@ static PyObject * _sre_SRE_Match___copy___impl(MatchObject *self) /*[clinic end generated code: output=a779c5fc8b5b4eb4 input=3bb4d30b6baddb5b]*/ { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } /*[clinic input] @@ -2477,8 +2464,7 @@ static PyObject * _sre_SRE_Match___deepcopy__(MatchObject *self, PyObject *memo) /*[clinic end generated code: output=ba7cb46d655e4ee2 input=779d12a31c2c325e]*/ { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } PyDoc_STRVAR(match_doc, @@ -2507,8 +2493,7 @@ match_lastgroup_get(MatchObject *self, void *Py_UNUSED(ignored)) { PyObject *result = PyTuple_GET_ITEM(self->pattern->indexgroup, self->lastindex); - Py_INCREF(result); - return result; + return Py_NewRef(result); } Py_RETURN_NONE; } @@ -2517,8 +2502,7 @@ static PyObject * match_regs_get(MatchObject *self, void *Py_UNUSED(ignored)) { if (self->regs) { - Py_INCREF(self->regs); - return self->regs; + return Py_NewRef(self->regs); } else return match_regs(self); } @@ -2562,11 +2546,9 @@ pattern_new_match(_sremodulestate* module_state, if (!match) return NULL; - Py_INCREF(pattern); - match->pattern = pattern; + match->pattern = (PatternObject*)Py_NewRef(pattern); - 
Py_INCREF(state->string); - match->string = state->string; + match->string = Py_NewRef(state->string); match->regs = NULL; match->groups = pattern->groups+1; @@ -2786,8 +2768,7 @@ pattern_scanner(_sremodulestate *module_state, return NULL; } - Py_INCREF(self); - scanner->pattern = (PyObject*) self; + scanner->pattern = Py_NewRef(self); PyObject_GC_Track(scanner); return (PyObject*) scanner; @@ -2832,8 +2813,7 @@ static PyObject * expand_template(TemplateObject *self, MatchObject *match) { if (Py_SIZE(self) == 0) { - Py_INCREF(self->literal); - return self->literal; + return Py_NewRef(self->literal); } PyObject *result = NULL; @@ -2853,8 +2833,7 @@ expand_template(TemplateObject *self, MatchObject *match) out = &PyList_GET_ITEM(list, 0); } - Py_INCREF(self->literal); - out[count++] = self->literal; + out[count++] = Py_NewRef(self->literal); for (Py_ssize_t i = 0; i < Py_SIZE(self); i++) { Py_ssize_t index = self->items[i].index; if (index >= match->groups) { @@ -2866,15 +2845,13 @@ expand_template(TemplateObject *self, MatchObject *match) goto cleanup; } if (item != Py_None) { - Py_INCREF(item); - out[count++] = item; + out[count++] = Py_NewRef(item); } Py_DECREF(item); PyObject *literal = self->items[i].literal; if (literal != NULL) { - Py_INCREF(literal); - out[count++] = literal; + out[count++] = Py_NewRef(literal); } } diff --git a/Modules/_ssl.c b/Modules/_ssl.c index bf8bd9dea89b6b..591eb91dd0f340 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -415,8 +415,7 @@ static PyObject * SSLError_str(PyOSErrorObject *self) { if (self->strerror != NULL && PyUnicode_Check(self->strerror)) { - Py_INCREF(self->strerror); - return self->strerror; + return Py_NewRef(self->strerror); } else return PyObject_Str(self->args); @@ -500,8 +499,7 @@ fill_and_set_sslerror(_sslmodulestate *state, if (verify_str != NULL) { verify_obj = PyUnicode_FromString(verify_str); } else { - verify_obj = Py_None; - Py_INCREF(verify_obj); + verify_obj = Py_NewRef(Py_None); } break; } @@ -800,8 +798,7 @@ newPySSLSocket(PySSLContext *sslctx, PySocketSockObject *sock, self->ssl = NULL; self->Socket = NULL; - self->ctx = sslctx; - Py_INCREF(sslctx); + self->ctx = (PySSLContext*)Py_NewRef(sslctx); self->shutdown_seen_zero = 0; self->owner = NULL; self->server_hostname = NULL; @@ -1026,8 +1023,7 @@ _asn1obj2py(_sslmodulestate *state, const ASN1_OBJECT *name, int no_name) } } if (!buflen && no_name) { - Py_INCREF(Py_None); - name_obj = Py_None; + name_obj = Py_NewRef(Py_None); } else { name_obj = PyUnicode_FromStringAndSize(namebuf, buflen); @@ -1876,8 +1872,7 @@ _ssl__SSLSocket_get_unverified_chain_impl(PySSLSocket *self) X509 *peer = SSL_get_peer_certificate(self->ssl); if (peer == NULL) { - peerobj = Py_None; - Py_INCREF(peerobj); + peerobj = Py_NewRef(Py_None); } else { /* consume X509 reference on success */ peerobj = _PySSL_CertificateFromX509(self->ctx->state, peer, 0); @@ -1907,8 +1902,7 @@ cipher_to_tuple(const SSL_CIPHER *cipher) cipher_name = SSL_CIPHER_get_name(cipher); if (cipher_name == NULL) { - Py_INCREF(Py_None); - PyTuple_SET_ITEM(retval, 0, Py_None); + PyTuple_SET_ITEM(retval, 0, Py_NewRef(Py_None)); } else { v = PyUnicode_FromString(cipher_name); if (v == NULL) @@ -1918,8 +1912,7 @@ cipher_to_tuple(const SSL_CIPHER *cipher) cipher_protocol = SSL_CIPHER_get_version(cipher); if (cipher_protocol == NULL) { - Py_INCREF(Py_None); - PyTuple_SET_ITEM(retval, 1, Py_None); + PyTuple_SET_ITEM(retval, 1, Py_NewRef(Py_None)); } else { v = PyUnicode_FromString(cipher_protocol); if (v == NULL) @@ -2103,16 +2096,14 @@ 
_ssl__SSLSocket_compression_impl(PySSLSocket *self) } static PySSLContext *PySSL_get_context(PySSLSocket *self, void *closure) { - Py_INCREF(self->ctx); - return self->ctx; + return (PySSLContext*)Py_NewRef(self->ctx); } static int PySSL_set_context(PySSLSocket *self, PyObject *value, void *closure) { if (PyObject_TypeCheck(value, self->ctx->state->PySSLContext_Type)) { - Py_INCREF(value); - Py_SETREF(self->ctx, (PySSLContext *)value); + Py_SETREF(self->ctx, (PySSLContext *)Py_NewRef(value)); SSL_set_SSL_CTX(self->ssl, self->ctx->ctx); /* Set SSL* internal msg_callback to state of new context's state */ SSL_set_msg_callback( @@ -2150,8 +2141,7 @@ PySSL_get_server_hostname(PySSLSocket *self, void *c) { if (self->server_hostname == NULL) Py_RETURN_NONE; - Py_INCREF(self->server_hostname); - return self->server_hostname; + return Py_NewRef(self->server_hostname); } PyDoc_STRVAR(PySSL_get_server_hostname_doc, @@ -2166,8 +2156,7 @@ PySSL_get_owner(PySSLSocket *self, void *c) Py_RETURN_NONE; owner = PyWeakref_GetObject(self->owner); - Py_INCREF(owner); - return owner; + return Py_NewRef(owner); } static int @@ -2820,8 +2809,7 @@ PySSL_get_session(PySSLSocket *self, void *closure) { } assert(self->ctx); - pysess->ctx = self->ctx; - Py_INCREF(pysess->ctx); + pysess->ctx = (PySSLContext*)Py_NewRef(self->ctx); pysess->session = session; PyObject_GC_Track(pysess); return (PyObject *)pysess; @@ -4180,7 +4168,7 @@ _ssl__SSLContext_load_dh_params(PySSLContext *self, PyObject *filepath) /*[clinic input] _ssl._SSLContext._wrap_socket sock: object(subclass_of="get_state_ctx(self)->Sock_Type") - server_side: int + server_side: bool server_hostname as hostname_obj: object = None * owner: object = None @@ -4192,7 +4180,7 @@ static PyObject * _ssl__SSLContext__wrap_socket_impl(PySSLContext *self, PyObject *sock, int server_side, PyObject *hostname_obj, PyObject *owner, PyObject *session) -/*[clinic end generated code: output=f103f238633940b4 input=f5916eadbc6eae81]*/ +/*[clinic end generated code: output=f103f238633940b4 input=700ca8fedff53994]*/ { char *hostname = NULL; PyObject *res; @@ -4217,7 +4205,7 @@ _ssl__SSLContext__wrap_socket_impl(PySSLContext *self, PyObject *sock, _ssl._SSLContext._wrap_bio incoming: object(subclass_of="get_state_ctx(self)->PySSLMemoryBIO_Type", type="PySSLMemoryBIO *") outgoing: object(subclass_of="get_state_ctx(self)->PySSLMemoryBIO_Type", type="PySSLMemoryBIO *") - server_side: int + server_side: bool server_hostname as hostname_obj: object = None * owner: object = None @@ -4230,7 +4218,7 @@ _ssl__SSLContext__wrap_bio_impl(PySSLContext *self, PySSLMemoryBIO *incoming, PySSLMemoryBIO *outgoing, int server_side, PyObject *hostname_obj, PyObject *owner, PyObject *session) -/*[clinic end generated code: output=5c5d6d9b41f99332 input=331edeec9c738382]*/ +/*[clinic end generated code: output=5c5d6d9b41f99332 input=a9205d097fd45a82]*/ { char *hostname = NULL; PyObject *res; @@ -4459,8 +4447,7 @@ get_sni_callback(PySSLContext *self, void *c) if (cb == NULL) { Py_RETURN_NONE; } - Py_INCREF(cb); - return cb; + return Py_NewRef(cb); } static int @@ -4482,8 +4469,7 @@ set_sni_callback(PySSLContext *self, PyObject *arg, void *c) "not a callable object"); return -1; } - Py_INCREF(arg); - self->set_sni_cb = arg; + self->set_sni_cb = Py_NewRef(arg); SSL_CTX_set_tlsext_servername_callback(self->ctx, _servername_callback); SSL_CTX_set_tlsext_servername_arg(self->ctx, self); } @@ -5196,7 +5182,7 @@ _ssl_get_default_verify_paths_impl(PyObject *module) #define CONVERT(info, target) { \ const char 
*tmp = (info); \ target = NULL; \ - if (!tmp) { Py_INCREF(Py_None); target = Py_None; } \ + if (!tmp) { target = Py_NewRef(Py_None); } \ else if ((target = PyUnicode_DecodeFSDefault(tmp)) == NULL) { \ target = PyBytes_FromString(tmp); } \ if (!target) goto error; \ @@ -5311,11 +5297,9 @@ certEncodingType(DWORD encodingType) } switch(encodingType) { case X509_ASN_ENCODING: - Py_INCREF(x509_asn); - return x509_asn; + return Py_NewRef(x509_asn); case PKCS_7_ASN_ENCODING: - Py_INCREF(pkcs_7_asn); - return pkcs_7_asn; + return Py_NewRef(pkcs_7_asn); default: return PyLong_FromLong(encodingType); } @@ -5717,8 +5701,7 @@ sslmodule_init_socketapi(PyObject *module) if ((sockmod == NULL) || (sockmod->Sock_Type == NULL)) { return -1; } - state->Sock_Type = sockmod->Sock_Type; - Py_INCREF(state->Sock_Type); + state->Sock_Type = (PyTypeObject*)Py_NewRef(sockmod->Sock_Type); return 0; } @@ -5881,6 +5864,9 @@ sslmodule_init_constants(PyObject *m) PyModule_AddIntConstant(m, "OP_IGNORE_UNEXPECTED_EOF", SSL_OP_IGNORE_UNEXPECTED_EOF); #endif +#ifdef SSL_OP_ENABLE_KTLS + PyModule_AddIntConstant(m, "OP_ENABLE_KTLS", SSL_OP_ENABLE_KTLS); +#endif #ifdef X509_CHECK_FLAG_ALWAYS_CHECK_SUBJECT PyModule_AddIntConstant(m, "HOSTFLAG_ALWAYS_CHECK_SUBJECT", @@ -5925,8 +5911,7 @@ sslmodule_init_constants(PyObject *m) #define addbool(m, key, value) \ do { \ PyObject *bool_obj = (value) ? Py_True : Py_False; \ - Py_INCREF(bool_obj); \ - PyModule_AddObject((m), (key), bool_obj); \ + PyModule_AddObject((m), (key), Py_NewRef(bool_obj)); \ } while (0) addbool(m, "HAS_SNI", 1); diff --git a/Modules/_ssl/debughelpers.c b/Modules/_ssl/debughelpers.c index 03c125eb44fd3a..08f3457035b90c 100644 --- a/Modules/_ssl/debughelpers.c +++ b/Modules/_ssl/debughelpers.c @@ -87,8 +87,7 @@ _PySSL_msg_callback(int write_p, int version, int content_type, static PyObject * _PySSLContext_get_msg_callback(PySSLContext *self, void *c) { if (self->msg_cb != NULL) { - Py_INCREF(self->msg_cb); - return self->msg_cb; + return Py_NewRef(self->msg_cb); } else { Py_RETURN_NONE; } @@ -107,8 +106,7 @@ _PySSLContext_set_msg_callback(PySSLContext *self, PyObject *arg, void *c) { "not a callable object"); return -1; } - Py_INCREF(arg); - self->msg_cb = arg; + self->msg_cb = Py_NewRef(arg); SSL_CTX_set_msg_callback(self->ctx, _PySSL_msg_callback); } return 0; @@ -166,8 +164,7 @@ _PySSL_keylog_callback(const SSL *ssl, const char *line) static PyObject * _PySSLContext_get_keylog_filename(PySSLContext *self, void *c) { if (self->keylog_filename != NULL) { - Py_INCREF(self->keylog_filename); - return self->keylog_filename; + return Py_NewRef(self->keylog_filename); } else { Py_RETURN_NONE; } @@ -203,8 +200,7 @@ _PySSLContext_set_keylog_filename(PySSLContext *self, PyObject *arg, void *c) { "Can't malloc memory for keylog file"); return -1; } - Py_INCREF(arg); - self->keylog_filename = arg; + self->keylog_filename = Py_NewRef(arg); /* Write a header for seekable, empty files (this excludes pipes). 
*/ PySSL_BEGIN_ALLOW_THREADS diff --git a/Modules/_struct.c b/Modules/_struct.c index f9bac34c1e38b8..3db7b991acd0a1 100644 --- a/Modules/_struct.c +++ b/Modules/_struct.c @@ -167,9 +167,6 @@ get_long(_structmodulestate *state, PyObject *v, long *p) x = PyLong_AsLong(v); Py_DECREF(v); if (x == (long)-1 && PyErr_Occurred()) { - if (PyErr_ExceptionMatches(PyExc_OverflowError)) - PyErr_SetString(state->StructError, - "argument out of range"); return -1; } *p = x; @@ -191,9 +188,6 @@ get_ulong(_structmodulestate *state, PyObject *v, unsigned long *p) x = PyLong_AsUnsignedLong(v); Py_DECREF(v); if (x == (unsigned long)-1 && PyErr_Occurred()) { - if (PyErr_ExceptionMatches(PyExc_OverflowError)) - PyErr_SetString(state->StructError, - "argument out of range"); return -1; } *p = x; @@ -214,9 +208,6 @@ get_longlong(_structmodulestate *state, PyObject *v, long long *p) x = PyLong_AsLongLong(v); Py_DECREF(v); if (x == (long long)-1 && PyErr_Occurred()) { - if (PyErr_ExceptionMatches(PyExc_OverflowError)) - PyErr_SetString(state->StructError, - "argument out of range"); return -1; } *p = x; @@ -237,9 +228,6 @@ get_ulonglong(_structmodulestate *state, PyObject *v, unsigned long long *p) x = PyLong_AsUnsignedLongLong(v); Py_DECREF(v); if (x == (unsigned long long)-1 && PyErr_Occurred()) { - if (PyErr_ExceptionMatches(PyExc_OverflowError)) - PyErr_SetString(state->StructError, - "argument out of range"); return -1; } *p = x; @@ -260,9 +248,6 @@ get_ssize_t(_structmodulestate *state, PyObject *v, Py_ssize_t *p) x = PyLong_AsSsize_t(v); Py_DECREF(v); if (x == (Py_ssize_t)-1 && PyErr_Occurred()) { - if (PyErr_ExceptionMatches(PyExc_OverflowError)) - PyErr_SetString(state->StructError, - "argument out of range"); return -1; } *p = x; @@ -283,9 +268,6 @@ get_size_t(_structmodulestate *state, PyObject *v, size_t *p) x = PyLong_AsSize_t(v); Py_DECREF(v); if (x == (size_t)-1 && PyErr_Occurred()) { - if (PyErr_ExceptionMatches(PyExc_OverflowError)) - PyErr_SetString(state->StructError, - "argument out of range"); return -1; } *p = x; @@ -293,7 +275,7 @@ get_size_t(_structmodulestate *state, PyObject *v, size_t *p) } -#define RANGE_ERROR(state, x, f, flag, mask) return _range_error(state, f, flag) +#define RANGE_ERROR(state, f, flag) return _range_error(state, f, flag) /* Floating point helpers */ @@ -545,12 +527,14 @@ static int np_byte(_structmodulestate *state, char *p, PyObject *v, const formatdef *f) { long x; - if (get_long(state, v, &x) < 0) + if (get_long(state, v, &x) < 0) { + if (PyErr_ExceptionMatches(PyExc_OverflowError)) { + RANGE_ERROR(state, f, 0); + } return -1; + } if (x < -128 || x > 127) { - PyErr_SetString(state->StructError, - "byte format requires -128 <= number <= 127"); - return -1; + RANGE_ERROR(state, f, 0); } *p = (char)x; return 0; @@ -560,12 +544,14 @@ static int np_ubyte(_structmodulestate *state, char *p, PyObject *v, const formatdef *f) { long x; - if (get_long(state, v, &x) < 0) + if (get_long(state, v, &x) < 0) { + if (PyErr_ExceptionMatches(PyExc_OverflowError)) { + RANGE_ERROR(state, f, 1); + } return -1; + } if (x < 0 || x > 255) { - PyErr_SetString(state->StructError, - "ubyte format requires 0 <= number <= 255"); - return -1; + RANGE_ERROR(state, f, 1); } *(unsigned char *)p = (unsigned char)x; return 0; @@ -588,13 +574,14 @@ np_short(_structmodulestate *state, char *p, PyObject *v, const formatdef *f) { long x; short y; - if (get_long(state, v, &x) < 0) + if (get_long(state, v, &x) < 0) { + if (PyErr_ExceptionMatches(PyExc_OverflowError)) { + RANGE_ERROR(state, f, 0); + } 
return -1; + } if (x < SHRT_MIN || x > SHRT_MAX) { - PyErr_Format(state->StructError, - "short format requires %d <= number <= %d", - (int)SHRT_MIN, (int)SHRT_MAX); - return -1; + RANGE_ERROR(state, f, 0); } y = (short)x; memcpy(p, (char *)&y, sizeof y); @@ -606,13 +593,14 @@ np_ushort(_structmodulestate *state, char *p, PyObject *v, const formatdef *f) { long x; unsigned short y; - if (get_long(state, v, &x) < 0) + if (get_long(state, v, &x) < 0) { + if (PyErr_ExceptionMatches(PyExc_OverflowError)) { + RANGE_ERROR(state, f, 1); + } return -1; + } if (x < 0 || x > USHRT_MAX) { - PyErr_Format(state->StructError, - "ushort format requires 0 <= number <= %u", - (unsigned int)USHRT_MAX); - return -1; + RANGE_ERROR(state, f, 1); } y = (unsigned short)x; memcpy(p, (char *)&y, sizeof y); @@ -624,11 +612,15 @@ np_int(_structmodulestate *state, char *p, PyObject *v, const formatdef *f) { long x; int y; - if (get_long(state, v, &x) < 0) + if (get_long(state, v, &x) < 0) { + if (PyErr_ExceptionMatches(PyExc_OverflowError)) { + RANGE_ERROR(state, f, 0); + } return -1; + } #if (SIZEOF_LONG > SIZEOF_INT) if ((x < ((long)INT_MIN)) || (x > ((long)INT_MAX))) - RANGE_ERROR(state, x, f, 0, -1); + RANGE_ERROR(state, f, 0); #endif y = (int)x; memcpy(p, (char *)&y, sizeof y); @@ -640,12 +632,16 @@ np_uint(_structmodulestate *state, char *p, PyObject *v, const formatdef *f) { unsigned long x; unsigned int y; - if (get_ulong(state, v, &x) < 0) + if (get_ulong(state, v, &x) < 0) { + if (PyErr_ExceptionMatches(PyExc_OverflowError)) { + RANGE_ERROR(state, f, 1); + } return -1; + } y = (unsigned int)x; #if (SIZEOF_LONG > SIZEOF_INT) if (x > ((unsigned long)UINT_MAX)) - RANGE_ERROR(state, y, f, 1, -1); + RANGE_ERROR(state, f, 1); #endif memcpy(p, (char *)&y, sizeof y); return 0; @@ -655,8 +651,12 @@ static int np_long(_structmodulestate *state, char *p, PyObject *v, const formatdef *f) { long x; - if (get_long(state, v, &x) < 0) + if (get_long(state, v, &x) < 0) { + if (PyErr_ExceptionMatches(PyExc_OverflowError)) { + RANGE_ERROR(state, f, 0); + } return -1; + } memcpy(p, (char *)&x, sizeof x); return 0; } @@ -665,8 +665,12 @@ static int np_ulong(_structmodulestate *state, char *p, PyObject *v, const formatdef *f) { unsigned long x; - if (get_ulong(state, v, &x) < 0) + if (get_ulong(state, v, &x) < 0) { + if (PyErr_ExceptionMatches(PyExc_OverflowError)) { + RANGE_ERROR(state, f, 1); + } return -1; + } memcpy(p, (char *)&x, sizeof x); return 0; } @@ -675,8 +679,12 @@ static int np_ssize_t(_structmodulestate *state, char *p, PyObject *v, const formatdef *f) { Py_ssize_t x; - if (get_ssize_t(state, v, &x) < 0) + if (get_ssize_t(state, v, &x) < 0) { + if (PyErr_ExceptionMatches(PyExc_OverflowError)) { + RANGE_ERROR(state, f, 0); + } return -1; + } memcpy(p, (char *)&x, sizeof x); return 0; } @@ -685,8 +693,12 @@ static int np_size_t(_structmodulestate *state, char *p, PyObject *v, const formatdef *f) { size_t x; - if (get_size_t(state, v, &x) < 0) + if (get_size_t(state, v, &x) < 0) { + if (PyErr_ExceptionMatches(PyExc_OverflowError)) { + RANGE_ERROR(state, f, 1); + } return -1; + } memcpy(p, (char *)&x, sizeof x); return 0; } @@ -695,8 +707,16 @@ static int np_longlong(_structmodulestate *state, char *p, PyObject *v, const formatdef *f) { long long x; - if (get_longlong(state, v, &x) < 0) + if (get_longlong(state, v, &x) < 0) { + if (PyErr_ExceptionMatches(PyExc_OverflowError)) { + PyErr_Format(state->StructError, + "'%c' format requires %lld <= number <= %lld", + f->format, + LLONG_MIN, + LLONG_MAX); + } return -1; + } 
memcpy(p, (char *)&x, sizeof x); return 0; } @@ -705,8 +725,15 @@ static int np_ulonglong(_structmodulestate *state, char *p, PyObject *v, const formatdef *f) { unsigned long long x; - if (get_ulonglong(state, v, &x) < 0) + if (get_ulonglong(state, v, &x) < 0) { + if (PyErr_ExceptionMatches(PyExc_OverflowError)) { + PyErr_Format(state->StructError, + "'%c' format requires 0 <= number <= %llu", + f->format, + ULLONG_MAX); + } return -1; + } memcpy(p, (char *)&x, sizeof x); return 0; } @@ -911,15 +938,19 @@ bp_int(_structmodulestate *state, char *p, PyObject *v, const formatdef *f) long x; Py_ssize_t i; unsigned char *q = (unsigned char *)p; - if (get_long(state, v, &x) < 0) + if (get_long(state, v, &x) < 0) { + if (PyErr_ExceptionMatches(PyExc_OverflowError)) { + RANGE_ERROR(state, f, 0); + } return -1; + } i = f->size; if (i != SIZEOF_LONG) { if ((i == 2) && (x < -32768 || x > 32767)) - RANGE_ERROR(state, x, f, 0, 0xffffL); + RANGE_ERROR(state, f, 0); #if (SIZEOF_LONG != 4) else if ((i == 4) && (x < -2147483648L || x > 2147483647L)) - RANGE_ERROR(state, x, f, 0, 0xffffffffL); + RANGE_ERROR(state, f, 0); #endif } do { @@ -935,14 +966,18 @@ bp_uint(_structmodulestate *state, char *p, PyObject *v, const formatdef *f) unsigned long x; Py_ssize_t i; unsigned char *q = (unsigned char *)p; - if (get_ulong(state, v, &x) < 0) + if (get_ulong(state, v, &x) < 0) { + if (PyErr_ExceptionMatches(PyExc_OverflowError)) { + RANGE_ERROR(state, f, 1); + } return -1; + } i = f->size; if (i != SIZEOF_LONG) { unsigned long maxint = 1; maxint <<= (unsigned long)(i * 8); if (x >= maxint) - RANGE_ERROR(state, x, f, 1, maxint - 1); + RANGE_ERROR(state, f, 1); } do { q[--i] = (unsigned char)(x & 0xffUL); @@ -964,6 +999,14 @@ bp_longlong(_structmodulestate *state, char *p, PyObject *v, const formatdef *f) 0, /* little_endian */ 1 /* signed */); Py_DECREF(v); + if (res == -1 && PyErr_Occurred()) { + PyErr_Format(state->StructError, + "'%c' format requires %lld <= number <= %lld", + f->format, + LLONG_MIN, + LLONG_MAX); + return -1; + } return res; } @@ -980,6 +1023,13 @@ bp_ulonglong(_structmodulestate *state, char *p, PyObject *v, const formatdef *f 0, /* little_endian */ 0 /* signed */); Py_DECREF(v); + if (res == -1 && PyErr_Occurred()) { + PyErr_Format(state->StructError, + "'%c' format requires 0 <= number <= %llu", + f->format, + ULLONG_MAX); + return -1; + } return res; } @@ -1148,15 +1198,19 @@ lp_int(_structmodulestate *state, char *p, PyObject *v, const formatdef *f) long x; Py_ssize_t i; unsigned char *q = (unsigned char *)p; - if (get_long(state, v, &x) < 0) + if (get_long(state, v, &x) < 0) { + if (PyErr_ExceptionMatches(PyExc_OverflowError)) { + RANGE_ERROR(state, f, 0); + } return -1; + } i = f->size; if (i != SIZEOF_LONG) { if ((i == 2) && (x < -32768 || x > 32767)) - RANGE_ERROR(state, x, f, 0, 0xffffL); + RANGE_ERROR(state, f, 0); #if (SIZEOF_LONG != 4) else if ((i == 4) && (x < -2147483648L || x > 2147483647L)) - RANGE_ERROR(state, x, f, 0, 0xffffffffL); + RANGE_ERROR(state, f, 0); #endif } do { @@ -1172,14 +1226,18 @@ lp_uint(_structmodulestate *state, char *p, PyObject *v, const formatdef *f) unsigned long x; Py_ssize_t i; unsigned char *q = (unsigned char *)p; - if (get_ulong(state, v, &x) < 0) + if (get_ulong(state, v, &x) < 0) { + if (PyErr_ExceptionMatches(PyExc_OverflowError)) { + RANGE_ERROR(state, f, 1); + } return -1; + } i = f->size; if (i != SIZEOF_LONG) { unsigned long maxint = 1; maxint <<= (unsigned long)(i * 8); if (x >= maxint) - RANGE_ERROR(state, x, f, 1, maxint - 1); + 
RANGE_ERROR(state, f, 1); } do { *q++ = (unsigned char)(x & 0xffUL); @@ -1201,6 +1259,14 @@ lp_longlong(_structmodulestate *state, char *p, PyObject *v, const formatdef *f) 1, /* little_endian */ 1 /* signed */); Py_DECREF(v); + if (res == -1 && PyErr_Occurred()) { + PyErr_Format(state->StructError, + "'%c' format requires %lld <= number <= %lld", + f->format, + LLONG_MIN, + LLONG_MAX); + return -1; + } return res; } @@ -1217,6 +1283,13 @@ lp_ulonglong(_structmodulestate *state, char *p, PyObject *v, const formatdef *f 1, /* little_endian */ 0 /* signed */); Py_DECREF(v); + if (res == -1 && PyErr_Occurred()) { + PyErr_Format(state->StructError, + "'%c' format requires 0 <= number <= %llu", + f->format, + ULLONG_MAX); + return -1; + } return res; } @@ -1829,8 +1902,7 @@ Struct_iter_unpack(PyStructObject *self, PyObject *buffer) Py_DECREF(iter); return NULL; } - Py_INCREF(self); - iter->so = self; + iter->so = (PyStructObject*)Py_NewRef(self); iter->index = 0; return (PyObject *)iter; } @@ -2091,13 +2163,11 @@ PyDoc_STRVAR(s_sizeof__doc__, static PyObject * s_sizeof(PyStructObject *self, void *unused) { - Py_ssize_t size; - formatcode *code; - - size = _PyObject_SIZE(Py_TYPE(self)) + sizeof(formatcode); - for (code = self->s_codes; code->fmtdef != NULL; code++) + size_t size = _PyObject_SIZE(Py_TYPE(self)) + sizeof(formatcode); + for (formatcode *code = self->s_codes; code->fmtdef != NULL; code++) { size += sizeof(formatcode); - return PyLong_FromSsize_t(size); + } + return PyLong_FromSize_t(size); } /* List of functions */ @@ -2165,8 +2235,7 @@ cache_struct_converter(PyObject *module, PyObject *fmt, PyStructObject **ptr) _structmodulestate *state = get_struct_state(module); if (fmt == NULL) { - Py_DECREF(*ptr); - *ptr = NULL; + Py_SETREF(*ptr, NULL); return 1; } @@ -2178,8 +2247,7 @@ cache_struct_converter(PyObject *module, PyObject *fmt, PyStructObject **ptr) s_object = PyDict_GetItemWithError(state->cache, fmt); if (s_object != NULL) { - Py_INCREF(s_object); - *ptr = (PyStructObject *)s_object; + *ptr = (PyStructObject *)Py_NewRef(s_object); return Py_CLEANUP_SUPPORTED; } else if (PyErr_Occurred()) { diff --git a/Modules/_testbuffer.c b/Modules/_testbuffer.c index eea9d217557e21..63ed4dc6ca80e1 100644 --- a/Modules/_testbuffer.c +++ b/Modules/_testbuffer.c @@ -1524,8 +1524,7 @@ ndarray_getbuf(NDArrayObject *self, Py_buffer *view, int flags) return -1; } - view->obj = (PyObject *)self; - Py_INCREF(view->obj); + view->obj = Py_NewRef(self); self->head->exports++; return 0; @@ -1788,8 +1787,7 @@ ndarray_subscript(NDArrayObject *self, PyObject *key) return unpack_single(base->buf, base->format, base->itemsize); } else if (key == Py_Ellipsis) { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } else { PyErr_SetString(PyExc_TypeError, "invalid indexing of scalar"); @@ -2021,8 +2019,7 @@ ndarray_get_obj(NDArrayObject *self, void *closure) if (base->obj == NULL) { Py_RETURN_NONE; } - Py_INCREF(base->obj); - return base->obj; + return Py_NewRef(base->obj); } static PyObject * @@ -2559,8 +2556,7 @@ cmp_contig(PyObject *self, PyObject *args) PyBuffer_Release(&v2); ret = equal ? 
Py_True : Py_False; - Py_INCREF(ret); - return ret; + return Py_NewRef(ret); } static PyObject * @@ -2597,8 +2593,7 @@ is_contiguous(PyObject *self, PyObject *args) PyBuffer_Release(&view); } - Py_INCREF(ret); - return ret; + return Py_NewRef(ret); } static Py_hash_t @@ -2748,8 +2743,7 @@ staticarray_getbuf(StaticArrayObject *self, Py_buffer *view, int flags) view->obj = NULL; /* Don't use this in new code. */ } else { - view->obj = (PyObject *)self; - Py_INCREF(view->obj); + view->obj = Py_NewRef(self); } return 0; diff --git a/Modules/_testcapi/datetime.c b/Modules/_testcapi/datetime.c new file mode 100644 index 00000000000000..88f992915fa8c1 --- /dev/null +++ b/Modules/_testcapi/datetime.c @@ -0,0 +1,451 @@ +#include "parts.h" + +#include "datetime.h" // PyDateTimeAPI + + +static int test_run_counter = 0; + +static PyObject * +test_datetime_capi(PyObject *self, PyObject *args) +{ + if (PyDateTimeAPI) { + if (test_run_counter) { + /* Probably regrtest.py -R */ + Py_RETURN_NONE; + } + else { + PyErr_SetString(PyExc_AssertionError, + "PyDateTime_CAPI somehow initialized"); + return NULL; + } + } + test_run_counter++; + PyDateTime_IMPORT; + + if (PyDateTimeAPI) { + Py_RETURN_NONE; + } + return NULL; +} + +/* Functions exposing the C API type checking for testing */ +#define MAKE_DATETIME_CHECK_FUNC(check_method, exact_method) \ +do { \ + PyObject *obj; \ + int exact = 0; \ + if (!PyArg_ParseTuple(args, "O|p", &obj, &exact)) { \ + return NULL; \ + } \ + int rv = exact?exact_method(obj):check_method(obj); \ + if (rv) { \ + Py_RETURN_TRUE; \ + } \ + Py_RETURN_FALSE; \ +} while (0) \ + +static PyObject * +datetime_check_date(PyObject *self, PyObject *args) +{ + MAKE_DATETIME_CHECK_FUNC(PyDate_Check, PyDate_CheckExact); +} + +static PyObject * +datetime_check_time(PyObject *self, PyObject *args) +{ + MAKE_DATETIME_CHECK_FUNC(PyTime_Check, PyTime_CheckExact); +} + +static PyObject * +datetime_check_datetime(PyObject *self, PyObject *args) +{ + MAKE_DATETIME_CHECK_FUNC(PyDateTime_Check, PyDateTime_CheckExact); +} + +static PyObject * +datetime_check_delta(PyObject *self, PyObject *args) +{ + MAKE_DATETIME_CHECK_FUNC(PyDelta_Check, PyDelta_CheckExact); +} + +static PyObject * +datetime_check_tzinfo(PyObject *self, PyObject *args) +{ + MAKE_DATETIME_CHECK_FUNC(PyTZInfo_Check, PyTZInfo_CheckExact); +} +#undef MAKE_DATETIME_CHECK_FUNC + + +/* Makes three variations on timezone representing UTC-5: + 1. timezone with offset and name from PyDateTimeAPI + 2. timezone with offset and name from PyTimeZone_FromOffsetAndName + 3. 
timezone with offset (no name) from PyTimeZone_FromOffset +*/ +static PyObject * +make_timezones_capi(PyObject *self, PyObject *args) +{ + PyObject *offset = PyDelta_FromDSU(0, -18000, 0); + PyObject *name = PyUnicode_FromString("EST"); + + PyObject *est_zone_capi = PyDateTimeAPI->TimeZone_FromTimeZone(offset, name); + PyObject *est_zone_macro = PyTimeZone_FromOffsetAndName(offset, name); + PyObject *est_zone_macro_noname = PyTimeZone_FromOffset(offset); + + Py_DecRef(offset); + Py_DecRef(name); + + PyObject *rv = PyTuple_New(3); + if (rv == NULL) { + return NULL; + } + + PyTuple_SET_ITEM(rv, 0, est_zone_capi); + PyTuple_SET_ITEM(rv, 1, est_zone_macro); + PyTuple_SET_ITEM(rv, 2, est_zone_macro_noname); + + return rv; +} + +static PyObject * +get_timezones_offset_zero(PyObject *self, PyObject *args) +{ + PyObject *offset = PyDelta_FromDSU(0, 0, 0); + PyObject *name = PyUnicode_FromString(""); + + // These two should return the UTC singleton + PyObject *utc_singleton_0 = PyTimeZone_FromOffset(offset); + PyObject *utc_singleton_1 = PyTimeZone_FromOffsetAndName(offset, NULL); + + // This one will return +00:00 zone, but not the UTC singleton + PyObject *non_utc_zone = PyTimeZone_FromOffsetAndName(offset, name); + + Py_DecRef(offset); + Py_DecRef(name); + + PyObject *rv = PyTuple_New(3); + PyTuple_SET_ITEM(rv, 0, utc_singleton_0); + PyTuple_SET_ITEM(rv, 1, utc_singleton_1); + PyTuple_SET_ITEM(rv, 2, non_utc_zone); + + return rv; +} + +static PyObject * +get_timezone_utc_capi(PyObject *self, PyObject *args) +{ + int macro = 0; + if (!PyArg_ParseTuple(args, "|p", &macro)) { + return NULL; + } + if (macro) { + return Py_NewRef(PyDateTime_TimeZone_UTC); + } + return Py_NewRef(PyDateTimeAPI->TimeZone_UTC); +} + +static PyObject * +get_date_fromdate(PyObject *self, PyObject *args) +{ + PyObject *rv = NULL; + int macro; + int year, month, day; + + if (!PyArg_ParseTuple(args, "piii", &macro, &year, &month, &day)) { + return NULL; + } + + if (macro) { + rv = PyDate_FromDate(year, month, day); + } + else { + rv = PyDateTimeAPI->Date_FromDate( + year, month, day, + PyDateTimeAPI->DateType); + } + return rv; +} + +static PyObject * +get_datetime_fromdateandtime(PyObject *self, PyObject *args) +{ + PyObject *rv = NULL; + int macro; + int year, month, day; + int hour, minute, second, microsecond; + + if (!PyArg_ParseTuple(args, "piiiiiii", + &macro, + &year, &month, &day, + &hour, &minute, &second, &microsecond)) { + return NULL; + } + + if (macro) { + rv = PyDateTime_FromDateAndTime( + year, month, day, + hour, minute, second, microsecond); + } + else { + rv = PyDateTimeAPI->DateTime_FromDateAndTime( + year, month, day, + hour, minute, second, microsecond, + Py_None, + PyDateTimeAPI->DateTimeType); + } + return rv; +} + +static PyObject * +get_datetime_fromdateandtimeandfold(PyObject *self, PyObject *args) +{ + PyObject *rv = NULL; + int macro; + int year, month, day; + int hour, minute, second, microsecond, fold; + + if (!PyArg_ParseTuple(args, "piiiiiiii", + &macro, + &year, &month, &day, + &hour, &minute, &second, &microsecond, + &fold)) { + return NULL; + } + + if (macro) { + rv = PyDateTime_FromDateAndTimeAndFold( + year, month, day, + hour, minute, second, microsecond, + fold); + } + else { + rv = PyDateTimeAPI->DateTime_FromDateAndTimeAndFold( + year, month, day, + hour, minute, second, microsecond, + Py_None, + fold, + PyDateTimeAPI->DateTimeType); + } + return rv; +} + +static PyObject * +get_time_fromtime(PyObject *self, PyObject *args) +{ + PyObject *rv = NULL; + int macro; + int hour, minute, second, microsecond; + + if
(!PyArg_ParseTuple(args, "piiii", + ¯o, + &hour, &minute, &second, µsecond)) + { + return NULL; + } + + if (macro) { + rv = PyTime_FromTime(hour, minute, second, microsecond); + } + else { + rv = PyDateTimeAPI->Time_FromTime( + hour, minute, second, microsecond, + Py_None, + PyDateTimeAPI->TimeType); + } + return rv; +} + +static PyObject * +get_time_fromtimeandfold(PyObject *self, PyObject *args) +{ + PyObject *rv = NULL; + int macro; + int hour, minute, second, microsecond, fold; + + if (!PyArg_ParseTuple(args, "piiiii", + ¯o, + &hour, &minute, &second, µsecond, + &fold)) { + return NULL; + } + + if (macro) { + rv = PyTime_FromTimeAndFold(hour, minute, second, microsecond, fold); + } + else { + rv = PyDateTimeAPI->Time_FromTimeAndFold( + hour, minute, second, microsecond, + Py_None, + fold, + PyDateTimeAPI->TimeType); + } + return rv; +} + +static PyObject * +get_delta_fromdsu(PyObject *self, PyObject *args) +{ + PyObject *rv = NULL; + int macro; + int days, seconds, microseconds; + + if (!PyArg_ParseTuple(args, "piii", + ¯o, + &days, &seconds, µseconds)) { + return NULL; + } + + if (macro) { + rv = PyDelta_FromDSU(days, seconds, microseconds); + } + else { + rv = PyDateTimeAPI->Delta_FromDelta( + days, seconds, microseconds, 1, + PyDateTimeAPI->DeltaType); + } + + return rv; +} + +static PyObject * +get_date_fromtimestamp(PyObject *self, PyObject *args) +{ + PyObject *tsargs = NULL, *ts = NULL, *rv = NULL; + int macro = 0; + + if (!PyArg_ParseTuple(args, "O|p", &ts, ¯o)) { + return NULL; + } + + // Construct the argument tuple + if ((tsargs = PyTuple_Pack(1, ts)) == NULL) { + return NULL; + } + + // Pass along to the API function + if (macro) { + rv = PyDate_FromTimestamp(tsargs); + } + else { + rv = PyDateTimeAPI->Date_FromTimestamp( + (PyObject *)PyDateTimeAPI->DateType, tsargs + ); + } + + Py_DECREF(tsargs); + return rv; +} + +static PyObject * +get_datetime_fromtimestamp(PyObject *self, PyObject *args) +{ + int macro = 0; + int usetz = 0; + PyObject *tsargs = NULL, *ts = NULL, *tzinfo = Py_None, *rv = NULL; + if (!PyArg_ParseTuple(args, "OO|pp", &ts, &tzinfo, &usetz, ¯o)) { + return NULL; + } + + // Construct the argument tuple + if (usetz) { + tsargs = PyTuple_Pack(2, ts, tzinfo); + } + else { + tsargs = PyTuple_Pack(1, ts); + } + + if (tsargs == NULL) { + return NULL; + } + + // Pass along to the API function + if (macro) { + rv = PyDateTime_FromTimestamp(tsargs); + } + else { + rv = PyDateTimeAPI->DateTime_FromTimestamp( + (PyObject *)PyDateTimeAPI->DateTimeType, tsargs, NULL + ); + } + + Py_DECREF(tsargs); + return rv; +} + +static PyObject * +test_PyDateTime_GET(PyObject *self, PyObject *obj) +{ + int year, month, day; + + year = PyDateTime_GET_YEAR(obj); + month = PyDateTime_GET_MONTH(obj); + day = PyDateTime_GET_DAY(obj); + + return Py_BuildValue("(iii)", year, month, day); +} + +static PyObject * +test_PyDateTime_DATE_GET(PyObject *self, PyObject *obj) +{ + int hour = PyDateTime_DATE_GET_HOUR(obj); + int minute = PyDateTime_DATE_GET_MINUTE(obj); + int second = PyDateTime_DATE_GET_SECOND(obj); + int microsecond = PyDateTime_DATE_GET_MICROSECOND(obj); + PyObject *tzinfo = PyDateTime_DATE_GET_TZINFO(obj); + + return Py_BuildValue("(iiiiO)", hour, minute, second, microsecond, tzinfo); +} + +static PyObject * +test_PyDateTime_TIME_GET(PyObject *self, PyObject *obj) +{ + int hour = PyDateTime_TIME_GET_HOUR(obj); + int minute = PyDateTime_TIME_GET_MINUTE(obj); + int second = PyDateTime_TIME_GET_SECOND(obj); + int microsecond = PyDateTime_TIME_GET_MICROSECOND(obj); + PyObject 
*tzinfo = PyDateTime_TIME_GET_TZINFO(obj); + + return Py_BuildValue("(iiiiO)", hour, minute, second, microsecond, tzinfo); +} + +static PyObject * +test_PyDateTime_DELTA_GET(PyObject *self, PyObject *obj) +{ + int days = PyDateTime_DELTA_GET_DAYS(obj); + int seconds = PyDateTime_DELTA_GET_SECONDS(obj); + int microseconds = PyDateTime_DELTA_GET_MICROSECONDS(obj); + + return Py_BuildValue("(iii)", days, seconds, microseconds); +} + +static PyMethodDef test_methods[] = { + {"PyDateTime_DATE_GET", test_PyDateTime_DATE_GET, METH_O}, + {"PyDateTime_DELTA_GET", test_PyDateTime_DELTA_GET, METH_O}, + {"PyDateTime_GET", test_PyDateTime_GET, METH_O}, + {"PyDateTime_TIME_GET", test_PyDateTime_TIME_GET, METH_O}, + {"datetime_check_date", datetime_check_date, METH_VARARGS}, + {"datetime_check_datetime", datetime_check_datetime, METH_VARARGS}, + {"datetime_check_delta", datetime_check_delta, METH_VARARGS}, + {"datetime_check_time", datetime_check_time, METH_VARARGS}, + {"datetime_check_tzinfo", datetime_check_tzinfo, METH_VARARGS}, + {"get_date_fromdate", get_date_fromdate, METH_VARARGS}, + {"get_date_fromtimestamp", get_date_fromtimestamp, METH_VARARGS}, + {"get_datetime_fromdateandtime", get_datetime_fromdateandtime, METH_VARARGS}, + {"get_datetime_fromdateandtimeandfold", get_datetime_fromdateandtimeandfold, METH_VARARGS}, + {"get_datetime_fromtimestamp", get_datetime_fromtimestamp, METH_VARARGS}, + {"get_delta_fromdsu", get_delta_fromdsu, METH_VARARGS}, + {"get_time_fromtime", get_time_fromtime, METH_VARARGS}, + {"get_time_fromtimeandfold", get_time_fromtimeandfold, METH_VARARGS}, + {"get_timezone_utc_capi", get_timezone_utc_capi, METH_VARARGS}, + {"get_timezones_offset_zero", get_timezones_offset_zero, METH_NOARGS}, + {"make_timezones_capi", make_timezones_capi, METH_NOARGS}, + {"test_datetime_capi", test_datetime_capi, METH_NOARGS}, + {NULL}, +}; + +int +_PyTestCapi_Init_DateTime(PyObject *mod) +{ + if (PyModule_AddFunctions(mod, test_methods) < 0) { + return -1; + } + return 0; +} diff --git a/Modules/_testcapi/docstring.c b/Modules/_testcapi/docstring.c new file mode 100644 index 00000000000000..a997c54a8a69c4 --- /dev/null +++ b/Modules/_testcapi/docstring.c @@ -0,0 +1,107 @@ +#include "parts.h" + + +PyDoc_STRVAR(docstring_empty, +"" +); + +PyDoc_STRVAR(docstring_no_signature, +"This docstring has no signature." +); + +PyDoc_STRVAR(docstring_with_invalid_signature, +"docstring_with_invalid_signature($module, /, boo)\n" +"\n" +"This docstring has an invalid signature." +); + +PyDoc_STRVAR(docstring_with_invalid_signature2, +"docstring_with_invalid_signature2($module, /, boo)\n" +"\n" +"--\n" +"\n" +"This docstring also has an invalid signature." +); + +PyDoc_STRVAR(docstring_with_signature, +"docstring_with_signature($module, /, sig)\n" +"--\n" +"\n" +"This docstring has a valid signature." +); + +PyDoc_STRVAR(docstring_with_signature_but_no_doc, +"docstring_with_signature_but_no_doc($module, /, sig)\n" +"--\n" +"\n" +); + +PyDoc_STRVAR(docstring_with_signature_and_extra_newlines, +"docstring_with_signature_and_extra_newlines($module, /, parameter)\n" +"--\n" +"\n" +"\n" +"This docstring has a valid signature and some extra newlines." 
+); + +PyDoc_STRVAR(docstring_with_signature_with_defaults, +"docstring_with_signature_with_defaults(module, s='avocado',\n" +" b=b'bytes', d=3.14, i=35, n=None, t=True, f=False,\n" +" local=the_number_three, sys=sys.maxsize,\n" +" exp=sys.maxsize - 1)\n" +"--\n" +"\n" +"\n" +"\n" +"This docstring has a valid signature with parameters,\n" +"and the parameters take defaults of varying types." +); + +/* This is here to provide a docstring for test_descr. */ +static PyObject * +test_with_docstring(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + Py_RETURN_NONE; +} + +static PyMethodDef test_methods[] = { + {"docstring_empty", + (PyCFunction)test_with_docstring, METH_NOARGS, + docstring_empty}, + {"docstring_no_signature", + (PyCFunction)test_with_docstring, METH_NOARGS, + docstring_no_signature}, + {"docstring_with_invalid_signature", + (PyCFunction)test_with_docstring, METH_NOARGS, + docstring_with_invalid_signature}, + {"docstring_with_invalid_signature2", + (PyCFunction)test_with_docstring, METH_NOARGS, + docstring_with_invalid_signature2}, + {"docstring_with_signature", + (PyCFunction)test_with_docstring, METH_NOARGS, + docstring_with_signature}, + {"docstring_with_signature_and_extra_newlines", + (PyCFunction)test_with_docstring, METH_NOARGS, + docstring_with_signature_and_extra_newlines}, + {"docstring_with_signature_but_no_doc", + (PyCFunction)test_with_docstring, METH_NOARGS, + docstring_with_signature_but_no_doc}, + {"docstring_with_signature_with_defaults", + (PyCFunction)test_with_docstring, METH_NOARGS, + docstring_with_signature_with_defaults}, + {"no_docstring", + (PyCFunction)test_with_docstring, METH_NOARGS}, + {"test_with_docstring", + test_with_docstring, METH_NOARGS, + PyDoc_STR("This is a pretty normal docstring.")}, + {NULL}, +}; + +int +_PyTestCapi_Init_Docstring(PyObject *mod) +{ + if (PyModule_AddFunctions(mod, test_methods) < 0) { + return -1; + } + return 0; +} diff --git a/Modules/_testcapi/float.c b/Modules/_testcapi/float.c new file mode 100644 index 00000000000000..26d99d990e0042 --- /dev/null +++ b/Modules/_testcapi/float.c @@ -0,0 +1,98 @@ +#define PY_SSIZE_T_CLEAN + +#include "parts.h" + + +// Test PyFloat_Pack2(), PyFloat_Pack4() and PyFloat_Pack8() +static PyObject * +test_float_pack(PyObject *self, PyObject *args) +{ + int size; + double d; + int le; + if (!PyArg_ParseTuple(args, "idi", &size, &d, &le)) { + return NULL; + } + switch (size) + { + case 2: + { + char data[2]; + if (PyFloat_Pack2(d, data, le) < 0) { + return NULL; + } + return PyBytes_FromStringAndSize(data, Py_ARRAY_LENGTH(data)); + } + case 4: + { + char data[4]; + if (PyFloat_Pack4(d, data, le) < 0) { + return NULL; + } + return PyBytes_FromStringAndSize(data, Py_ARRAY_LENGTH(data)); + } + case 8: + { + char data[8]; + if (PyFloat_Pack8(d, data, le) < 0) { + return NULL; + } + return PyBytes_FromStringAndSize(data, Py_ARRAY_LENGTH(data)); + } + default: break; + } + + PyErr_SetString(PyExc_ValueError, "size must 2, 4 or 8"); + return NULL; +} + + +// Test PyFloat_Unpack2(), PyFloat_Unpack4() and PyFloat_Unpack8() +static PyObject * +test_float_unpack(PyObject *self, PyObject *args) +{ + assert(!PyErr_Occurred()); + const char *data; + Py_ssize_t size; + int le; + if (!PyArg_ParseTuple(args, "y#i", &data, &size, &le)) { + return NULL; + } + double d; + switch (size) + { + case 2: + d = PyFloat_Unpack2(data, le); + break; + case 4: + d = PyFloat_Unpack4(data, le); + break; + case 8: + d = PyFloat_Unpack8(data, le); + break; + default: + PyErr_SetString(PyExc_ValueError, "data length must 2, 
4 or 8 bytes"); + return NULL; + } + + if (d == -1.0 && PyErr_Occurred()) { + return NULL; + } + return PyFloat_FromDouble(d); +} + +static PyMethodDef test_methods[] = { + {"float_pack", test_float_pack, METH_VARARGS, NULL}, + {"float_unpack", test_float_unpack, METH_VARARGS, NULL}, + {NULL}, +}; + +int +_PyTestCapi_Init_Float(PyObject *mod) +{ + if (PyModule_AddFunctions(mod, test_methods) < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/_testcapi/getargs.c b/Modules/_testcapi/getargs.c new file mode 100644 index 00000000000000..25a8e5fe8b605b --- /dev/null +++ b/Modules/_testcapi/getargs.c @@ -0,0 +1,920 @@ +/* + * Tests for Python/getargs.c and Python/modsupport.c; + * APIs that parse and build arguments. + */ + +#define PY_SSIZE_T_CLEAN + +#include "parts.h" + +static PyObject * +parse_tuple_and_keywords(PyObject *self, PyObject *args) +{ + PyObject *sub_args; + PyObject *sub_kwargs; + const char *sub_format; + PyObject *sub_keywords; + + double buffers[8][4]; /* double ensures alignment where necessary */ + PyObject *converted[8]; + char *keywords[8 + 1]; /* space for NULL at end */ + + PyObject *return_value = NULL; + + if (!PyArg_ParseTuple(args, "OOsO:parse_tuple_and_keywords", + &sub_args, &sub_kwargs, &sub_format, &sub_keywords)) + { + return NULL; + } + + if (!(PyList_CheckExact(sub_keywords) || + PyTuple_CheckExact(sub_keywords))) + { + PyErr_SetString(PyExc_ValueError, + "parse_tuple_and_keywords: " + "sub_keywords must be either list or tuple"); + return NULL; + } + + memset(buffers, 0, sizeof(buffers)); + memset(converted, 0, sizeof(converted)); + memset(keywords, 0, sizeof(keywords)); + + Py_ssize_t size = PySequence_Fast_GET_SIZE(sub_keywords); + if (size > 8) { + PyErr_SetString(PyExc_ValueError, + "parse_tuple_and_keywords: too many keywords in sub_keywords"); + goto exit; + } + + for (Py_ssize_t i = 0; i < size; i++) { + PyObject *o = PySequence_Fast_GET_ITEM(sub_keywords, i); + if (!PyUnicode_FSConverter(o, (void *)(converted + i))) { + PyErr_Format(PyExc_ValueError, + "parse_tuple_and_keywords: " + "could not convert keywords[%zd] to narrow string", i); + goto exit; + } + keywords[i] = PyBytes_AS_STRING(converted[i]); + } + + int result = PyArg_ParseTupleAndKeywords(sub_args, sub_kwargs, + sub_format, keywords, + buffers + 0, buffers + 1, buffers + 2, buffers + 3, + buffers + 4, buffers + 5, buffers + 6, buffers + 7); + + if (result) { + return_value = Py_NewRef(Py_None); + } + +exit: + size = sizeof(converted) / sizeof(converted[0]); + for (Py_ssize_t i = 0; i < size; i++) { + Py_XDECREF(converted[i]); + } + return return_value; +} + +static PyObject * +get_args(PyObject *self, PyObject *args) +{ + if (args == NULL) { + args = Py_None; + } + return Py_NewRef(args); +} + +static PyObject * +get_kwargs(PyObject *self, PyObject *args, PyObject *kwargs) +{ + if (kwargs == NULL) { + kwargs = Py_None; + } + return Py_NewRef(kwargs); +} + +static PyObject * +getargs_w_star(PyObject *self, PyObject *args) +{ + Py_buffer buffer; + + if (!PyArg_ParseTuple(args, "w*:getargs_w_star", &buffer)) { + return NULL; + } + + if (2 <= buffer.len) { + char *str = buffer.buf; + str[0] = '['; + str[buffer.len-1] = ']'; + } + + PyObject *result = PyBytes_FromStringAndSize(buffer.buf, buffer.len); + PyBuffer_Release(&buffer); + return result; +} + +static PyObject * +test_empty_argparse(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + /* Test that formats can begin with '|'. See issue #4720. 
*/ + PyObject *dict = NULL; + static char *kwlist[] = {NULL}; + PyObject *tuple = PyTuple_New(0); + if (!tuple) { + return NULL; + } + int result; + if (!(result = PyArg_ParseTuple(tuple, "|:test_empty_argparse"))) { + goto done; + } + dict = PyDict_New(); + if (!dict) { + goto done; + } + result = PyArg_ParseTupleAndKeywords(tuple, dict, "|:test_empty_argparse", + kwlist); + done: + Py_DECREF(tuple); + Py_XDECREF(dict); + if (!result) { + return NULL; + } + Py_RETURN_NONE; +} + +/* Test tuple argument processing */ +static PyObject * +getargs_tuple(PyObject *self, PyObject *args) +{ + int a, b, c; + if (!PyArg_ParseTuple(args, "i(ii)", &a, &b, &c)) { + return NULL; + } + return Py_BuildValue("iii", a, b, c); +} + +/* test PyArg_ParseTupleAndKeywords */ +static PyObject * +getargs_keywords(PyObject *self, PyObject *args, PyObject *kwargs) +{ + static char *keywords[] = {"arg1","arg2","arg3","arg4","arg5", NULL}; + static const char fmt[] = "(ii)i|(i(ii))(iii)i"; + int int_args[10] = {-1, -1, -1, -1, -1, -1, -1, -1, -1, -1}; + + if (!PyArg_ParseTupleAndKeywords(args, kwargs, fmt, keywords, + &int_args[0], &int_args[1], &int_args[2], &int_args[3], &int_args[4], + &int_args[5], &int_args[6], &int_args[7], &int_args[8], &int_args[9])) + { + return NULL; + } + return Py_BuildValue("iiiiiiiiii", + int_args[0], int_args[1], int_args[2], int_args[3], int_args[4], + int_args[5], int_args[6], int_args[7], int_args[8], int_args[9]); +} + +/* test PyArg_ParseTupleAndKeywords keyword-only arguments */ +static PyObject * +getargs_keyword_only(PyObject *self, PyObject *args, PyObject *kwargs) +{ + static char *keywords[] = {"required", "optional", "keyword_only", NULL}; + int required = -1; + int optional = -1; + int keyword_only = -1; + + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "i|i$i", keywords, + &required, &optional, &keyword_only)) + { + return NULL; + } + return Py_BuildValue("iii", required, optional, keyword_only); +} + +/* test PyArg_ParseTupleAndKeywords positional-only arguments */ +static PyObject * +getargs_positional_only_and_keywords(PyObject *self, PyObject *args, + PyObject *kwargs) +{ + static char *keywords[] = {"", "", "keyword", NULL}; + int required = -1; + int optional = -1; + int keyword = -1; + + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "i|ii", keywords, + &required, &optional, &keyword)) + { + return NULL; + } + return Py_BuildValue("iii", required, optional, keyword); +} + +/* Functions to call PyArg_ParseTuple with integer format codes, + and return the result. 
+*/ +static PyObject * +getargs_b(PyObject *self, PyObject *args) +{ + unsigned char value; + if (!PyArg_ParseTuple(args, "b", &value)) { + return NULL; + } + return PyLong_FromUnsignedLong((unsigned long)value); +} + +static PyObject * +getargs_B(PyObject *self, PyObject *args) +{ + unsigned char value; + if (!PyArg_ParseTuple(args, "B", &value)) { + return NULL; + } + return PyLong_FromUnsignedLong((unsigned long)value); +} + +static PyObject * +getargs_h(PyObject *self, PyObject *args) +{ + short value; + if (!PyArg_ParseTuple(args, "h", &value)) { + return NULL; + } + return PyLong_FromLong((long)value); +} + +static PyObject * +getargs_H(PyObject *self, PyObject *args) +{ + unsigned short value; + if (!PyArg_ParseTuple(args, "H", &value)) { + return NULL; + } + return PyLong_FromUnsignedLong((unsigned long)value); +} + +static PyObject * +getargs_I(PyObject *self, PyObject *args) +{ + unsigned int value; + if (!PyArg_ParseTuple(args, "I", &value)) { + return NULL; + } + return PyLong_FromUnsignedLong((unsigned long)value); +} + +static PyObject * +getargs_k(PyObject *self, PyObject *args) +{ + unsigned long value; + if (!PyArg_ParseTuple(args, "k", &value)) { + return NULL; + } + return PyLong_FromUnsignedLong(value); +} + +static PyObject * +getargs_i(PyObject *self, PyObject *args) +{ + int value; + if (!PyArg_ParseTuple(args, "i", &value)) { + return NULL; + } + return PyLong_FromLong((long)value); +} + +static PyObject * +getargs_l(PyObject *self, PyObject *args) +{ + long value; + if (!PyArg_ParseTuple(args, "l", &value)) { + return NULL; + } + return PyLong_FromLong(value); +} + +static PyObject * +getargs_n(PyObject *self, PyObject *args) +{ + Py_ssize_t value; + if (!PyArg_ParseTuple(args, "n", &value)) { + return NULL; + } + return PyLong_FromSsize_t(value); +} + +static PyObject * +getargs_p(PyObject *self, PyObject *args) +{ + int value; + if (!PyArg_ParseTuple(args, "p", &value)) { + return NULL; + } + return PyLong_FromLong(value); +} + +static PyObject * +getargs_L(PyObject *self, PyObject *args) +{ + long long value; + if (!PyArg_ParseTuple(args, "L", &value)) { + return NULL; + } + return PyLong_FromLongLong(value); +} + +static PyObject * +getargs_K(PyObject *self, PyObject *args) +{ + unsigned long long value; + if (!PyArg_ParseTuple(args, "K", &value)) { + return NULL; + } + return PyLong_FromUnsignedLongLong(value); +} + +/* This function not only tests the 'k' getargs code, but also the + PyLong_AsUnsignedLongMask() function. 
*/ +static PyObject * +test_k_code(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + PyObject *tuple, *num; + unsigned long value; + + tuple = PyTuple_New(1); + if (tuple == NULL) { + return NULL; + } + + /* a number larger than ULONG_MAX even on 64-bit platforms */ + num = PyLong_FromString("FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); + if (num == NULL) { + return NULL; + } + + value = PyLong_AsUnsignedLongMask(num); + if (value != ULONG_MAX) { + PyErr_SetString(PyExc_AssertionError, + "test_k_code: " + "PyLong_AsUnsignedLongMask() returned wrong value for long 0xFFF...FFF"); + return NULL; + } + + PyTuple_SET_ITEM(tuple, 0, num); + + value = 0; + if (!PyArg_ParseTuple(tuple, "k:test_k_code", &value)) { + return NULL; + } + if (value != ULONG_MAX) { + PyErr_SetString(PyExc_AssertionError, + "test_k_code: k code returned wrong value for long 0xFFF...FFF"); + return NULL; + } + + Py_DECREF(num); + num = PyLong_FromString("-FFFFFFFF000000000000000042", NULL, 16); + if (num == NULL) { + return NULL; + } + + value = PyLong_AsUnsignedLongMask(num); + if (value != (unsigned long)-0x42) { + PyErr_SetString(PyExc_AssertionError, + "test_k_code: " + "PyLong_AsUnsignedLongMask() returned wrong value for long -0xFFF..000042"); + return NULL; + } + + PyTuple_SET_ITEM(tuple, 0, num); + + value = 0; + if (!PyArg_ParseTuple(tuple, "k:test_k_code", &value)) { + return NULL; + } + if (value != (unsigned long)-0x42) { + PyErr_SetString(PyExc_AssertionError, + "test_k_code: k code returned wrong value for long -0xFFF..000042"); + return NULL; + } + + Py_DECREF(tuple); + Py_RETURN_NONE; +} + +static PyObject * +getargs_f(PyObject *self, PyObject *args) +{ + float f; + if (!PyArg_ParseTuple(args, "f", &f)) { + return NULL; + } + return PyFloat_FromDouble(f); +} + +static PyObject * +getargs_d(PyObject *self, PyObject *args) +{ + double d; + if (!PyArg_ParseTuple(args, "d", &d)) { + return NULL; + } + return PyFloat_FromDouble(d); +} + +static PyObject * +getargs_D(PyObject *self, PyObject *args) +{ + Py_complex cval; + if (!PyArg_ParseTuple(args, "D", &cval)) { + return NULL; + } + return PyComplex_FromCComplex(cval); +} + +static PyObject * +getargs_S(PyObject *self, PyObject *args) +{ + PyObject *obj; + if (!PyArg_ParseTuple(args, "S", &obj)) { + return NULL; + } + return Py_NewRef(obj); +} + +static PyObject * +getargs_Y(PyObject *self, PyObject *args) +{ + PyObject *obj; + if (!PyArg_ParseTuple(args, "Y", &obj)) { + return NULL; + } + return Py_NewRef(obj); +} + +static PyObject * +getargs_U(PyObject *self, PyObject *args) +{ + PyObject *obj; + if (!PyArg_ParseTuple(args, "U", &obj)) { + return NULL; + } + return Py_NewRef(obj); +} + +static PyObject * +getargs_c(PyObject *self, PyObject *args) +{ + char c; + if (!PyArg_ParseTuple(args, "c", &c)) { + return NULL; + } + return PyLong_FromLong((unsigned char)c); +} + +static PyObject * +getargs_C(PyObject *self, PyObject *args) +{ + int c; + if (!PyArg_ParseTuple(args, "C", &c)) { + return NULL; + } + return PyLong_FromLong(c); +} + +static PyObject * +getargs_s(PyObject *self, PyObject *args) +{ + char *str; + if (!PyArg_ParseTuple(args, "s", &str)) { + return NULL; + } + return PyBytes_FromString(str); +} + +static PyObject * +getargs_s_star(PyObject *self, PyObject *args) +{ + Py_buffer buffer; + PyObject *bytes; + if (!PyArg_ParseTuple(args, "s*", &buffer)) { + return NULL; + } + bytes = PyBytes_FromStringAndSize(buffer.buf, buffer.len); + PyBuffer_Release(&buffer); + return bytes; +} + +static PyObject * +getargs_s_hash(PyObject *self, PyObject *args) +{ + 
char *str; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "s#", &str, &size)) { + return NULL; + } + return PyBytes_FromStringAndSize(str, size); +} + +static PyObject * +getargs_z(PyObject *self, PyObject *args) +{ + char *str; + if (!PyArg_ParseTuple(args, "z", &str)) { + return NULL; + } + if (str != NULL) { + return PyBytes_FromString(str); + } + Py_RETURN_NONE; +} + +static PyObject * +getargs_z_star(PyObject *self, PyObject *args) +{ + Py_buffer buffer; + PyObject *bytes; + if (!PyArg_ParseTuple(args, "z*", &buffer)) { + return NULL; + } + if (buffer.buf != NULL) { + bytes = PyBytes_FromStringAndSize(buffer.buf, buffer.len); + } + else { + bytes = Py_NewRef(Py_None); + } + PyBuffer_Release(&buffer); + return bytes; +} + +static PyObject * +getargs_z_hash(PyObject *self, PyObject *args) +{ + char *str; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "z#", &str, &size)) { + return NULL; + } + if (str != NULL) { + return PyBytes_FromStringAndSize(str, size); + } + Py_RETURN_NONE; +} + +static PyObject * +getargs_y(PyObject *self, PyObject *args) +{ + char *str; + if (!PyArg_ParseTuple(args, "y", &str)) { + return NULL; + } + return PyBytes_FromString(str); +} + +static PyObject * +getargs_y_star(PyObject *self, PyObject *args) +{ + Py_buffer buffer; + if (!PyArg_ParseTuple(args, "y*", &buffer)) { + return NULL; + } + PyObject *bytes = PyBytes_FromStringAndSize(buffer.buf, buffer.len); + PyBuffer_Release(&buffer); + return bytes; +} + +static PyObject * +getargs_y_hash(PyObject *self, PyObject *args) +{ + char *str; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "y#", &str, &size)) { + return NULL; + } + return PyBytes_FromStringAndSize(str, size); +} + +static PyObject * +getargs_u(PyObject *self, PyObject *args) +{ + Py_UNICODE *str; + if (!PyArg_ParseTuple(args, "u", &str)) { + return NULL; + } + return PyUnicode_FromWideChar(str, -1); +} + +static PyObject * +getargs_u_hash(PyObject *self, PyObject *args) +{ + Py_UNICODE *str; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "u#", &str, &size)) { + return NULL; + } + return PyUnicode_FromWideChar(str, size); +} + +static PyObject * +getargs_Z(PyObject *self, PyObject *args) +{ + Py_UNICODE *str; + if (!PyArg_ParseTuple(args, "Z", &str)) { + return NULL; + } + if (str != NULL) { + return PyUnicode_FromWideChar(str, -1); + } + Py_RETURN_NONE; +} + +static PyObject * +getargs_Z_hash(PyObject *self, PyObject *args) +{ + Py_UNICODE *str; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Z#", &str, &size)) { + return NULL; + } + if (str != NULL) { + return PyUnicode_FromWideChar(str, size); + } + Py_RETURN_NONE; +} + +static PyObject * +getargs_es(PyObject *self, PyObject *args) +{ + PyObject *arg; + const char *encoding = NULL; + char *str; + + if (!PyArg_ParseTuple(args, "O|s", &arg, &encoding)) { + return NULL; + } + if (!PyArg_Parse(arg, "es", encoding, &str)) { + return NULL; + } + PyObject *result = PyBytes_FromString(str); + PyMem_Free(str); + return result; +} + +static PyObject * +getargs_et(PyObject *self, PyObject *args) +{ + PyObject *arg; + const char *encoding = NULL; + char *str; + + if (!PyArg_ParseTuple(args, "O|s", &arg, &encoding)) { + return NULL; + } + if (!PyArg_Parse(arg, "et", encoding, &str)) { + return NULL; + } + PyObject *result = PyBytes_FromString(str); + PyMem_Free(str); + return result; +} + +static PyObject * +getargs_es_hash(PyObject *self, PyObject *args) +{ + PyObject *arg; + const char *encoding = NULL; + PyByteArrayObject *buffer = NULL; + char *str = NULL; + Py_ssize_t size; + + if 
(!PyArg_ParseTuple(args, "O|sY", &arg, &encoding, &buffer)) { + return NULL; + } + if (buffer != NULL) { + str = PyByteArray_AS_STRING(buffer); + size = PyByteArray_GET_SIZE(buffer); + } + if (!PyArg_Parse(arg, "es#", encoding, &str, &size)) { + return NULL; + } + PyObject *result = PyBytes_FromStringAndSize(str, size); + if (buffer == NULL) { + PyMem_Free(str); + } + return result; +} + +static PyObject * +getargs_et_hash(PyObject *self, PyObject *args) +{ + PyObject *arg; + const char *encoding = NULL; + PyByteArrayObject *buffer = NULL; + char *str = NULL; + Py_ssize_t size; + + if (!PyArg_ParseTuple(args, "O|sY", &arg, &encoding, &buffer)) { + return NULL; + } + if (buffer != NULL) { + str = PyByteArray_AS_STRING(buffer); + size = PyByteArray_GET_SIZE(buffer); + } + if (!PyArg_Parse(arg, "et#", encoding, &str, &size)) { + return NULL; + } + PyObject *result = PyBytes_FromStringAndSize(str, size); + if (buffer == NULL) { + PyMem_Free(str); + } + return result; +} + +/* Test the L code for PyArg_ParseTuple. This should deliver a long long + for both long and int arguments. The test may leak a little memory if + it fails. +*/ +static PyObject * +test_L_code(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + PyObject *tuple, *num; + long long value; + + tuple = PyTuple_New(1); + if (tuple == NULL) { + return NULL; + } + + num = PyLong_FromLong(42); + if (num == NULL) { + return NULL; + } + + PyTuple_SET_ITEM(tuple, 0, num); + + value = -1; + if (!PyArg_ParseTuple(tuple, "L:test_L_code", &value)) { + return NULL; + } + if (value != 42) { + PyErr_SetString(PyExc_AssertionError, + "test_L_code: L code returned wrong value for long 42"); + return NULL; + } + + Py_DECREF(num); + num = PyLong_FromLong(42); + if (num == NULL) { + return NULL; + } + + PyTuple_SET_ITEM(tuple, 0, num); + + value = -1; + if (!PyArg_ParseTuple(tuple, "L:test_L_code", &value)) { + return NULL; + } + if (value != 42) { + PyErr_SetString(PyExc_AssertionError, + "test_L_code: L code returned wrong value for int 42"); + return NULL; + } + + Py_DECREF(tuple); + Py_RETURN_NONE; +} + +/* Test the s and z codes for PyArg_ParseTuple. 
+*/ +static PyObject * +test_s_code(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + /* Unicode strings should be accepted */ + PyObject *tuple = PyTuple_New(1); + if (tuple == NULL) { + return NULL; + } + + PyObject *obj = PyUnicode_Decode("t\xeate", strlen("t\xeate"), + "latin-1", NULL); + if (obj == NULL) { + return NULL; + } + + PyTuple_SET_ITEM(tuple, 0, obj); + + /* These two blocks used to raise a TypeError: + * "argument must be string without null bytes, not str" + */ + char *value; + if (!PyArg_ParseTuple(tuple, "s:test_s_code1", &value)) { + return NULL; + } + + if (!PyArg_ParseTuple(tuple, "z:test_s_code2", &value)) { + return NULL; + } + + Py_DECREF(tuple); + Py_RETURN_NONE; +} + +#undef PyArg_ParseTupleAndKeywords +PyAPI_FUNC(int) PyArg_ParseTupleAndKeywords(PyObject *, PyObject *, + const char *, char **, ...); + +static PyObject * +getargs_s_hash_int(PyObject *self, PyObject *args, PyObject *kwargs) +{ + static char *keywords[] = {"", "", "x", NULL}; + Py_buffer buf = {NULL}; + const char *s; + int len; + int i = 0; + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "w*|s#i", keywords, + &buf, &s, &len, &i)) + { + return NULL; + } + PyBuffer_Release(&buf); + Py_RETURN_NONE; +} + +static PyObject * +getargs_s_hash_int2(PyObject *self, PyObject *args, PyObject *kwargs) +{ + static char *keywords[] = {"", "", "x", NULL}; + Py_buffer buf = {NULL}; + const char *s; + int len; + int i = 0; + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "w*|(s#)i", keywords, + &buf, &s, &len, &i)) + { + return NULL; + } + PyBuffer_Release(&buf); + Py_RETURN_NONE; +} + +static PyMethodDef test_methods[] = { + {"get_args", get_args, METH_VARARGS}, + {"get_kwargs", _PyCFunction_CAST(get_kwargs), METH_VARARGS|METH_KEYWORDS}, + {"getargs_B", getargs_B, METH_VARARGS}, + {"getargs_C", getargs_C, METH_VARARGS}, + {"getargs_D", getargs_D, METH_VARARGS}, + {"getargs_H", getargs_H, METH_VARARGS}, + {"getargs_I", getargs_I, METH_VARARGS}, + {"getargs_K", getargs_K, METH_VARARGS}, + {"getargs_L", getargs_L, METH_VARARGS}, + {"getargs_S", getargs_S, METH_VARARGS}, + {"getargs_U", getargs_U, METH_VARARGS}, + {"getargs_Y", getargs_Y, METH_VARARGS}, + {"getargs_Z", getargs_Z, METH_VARARGS}, + {"getargs_Z_hash", getargs_Z_hash, METH_VARARGS}, + {"getargs_b", getargs_b, METH_VARARGS}, + {"getargs_c", getargs_c, METH_VARARGS}, + {"getargs_d", getargs_d, METH_VARARGS}, + {"getargs_es", getargs_es, METH_VARARGS}, + {"getargs_es_hash", getargs_es_hash, METH_VARARGS}, + {"getargs_et", getargs_et, METH_VARARGS}, + {"getargs_et_hash", getargs_et_hash, METH_VARARGS}, + {"getargs_f", getargs_f, METH_VARARGS}, + {"getargs_h", getargs_h, METH_VARARGS}, + {"getargs_i", getargs_i, METH_VARARGS}, + {"getargs_k", getargs_k, METH_VARARGS}, + {"getargs_keyword_only", _PyCFunction_CAST(getargs_keyword_only), METH_VARARGS|METH_KEYWORDS}, + {"getargs_keywords", _PyCFunction_CAST(getargs_keywords), METH_VARARGS|METH_KEYWORDS}, + {"getargs_l", getargs_l, METH_VARARGS}, + {"getargs_n", getargs_n, METH_VARARGS}, + {"getargs_p", getargs_p, METH_VARARGS}, + {"getargs_positional_only_and_keywords", _PyCFunction_CAST(getargs_positional_only_and_keywords), METH_VARARGS|METH_KEYWORDS}, + {"getargs_s", getargs_s, METH_VARARGS}, + {"getargs_s_hash", getargs_s_hash, METH_VARARGS}, + {"getargs_s_hash_int", _PyCFunction_CAST(getargs_s_hash_int), METH_VARARGS|METH_KEYWORDS}, + {"getargs_s_hash_int2", _PyCFunction_CAST(getargs_s_hash_int2), METH_VARARGS|METH_KEYWORDS}, + {"getargs_s_star", getargs_s_star, METH_VARARGS}, + {"getargs_tuple", 
getargs_tuple, METH_VARARGS}, + {"getargs_u", getargs_u, METH_VARARGS}, + {"getargs_u_hash", getargs_u_hash, METH_VARARGS}, + {"getargs_w_star", getargs_w_star, METH_VARARGS}, + {"getargs_y", getargs_y, METH_VARARGS}, + {"getargs_y_hash", getargs_y_hash, METH_VARARGS}, + {"getargs_y_star", getargs_y_star, METH_VARARGS}, + {"getargs_z", getargs_z, METH_VARARGS}, + {"getargs_z_hash", getargs_z_hash, METH_VARARGS}, + {"getargs_z_star", getargs_z_star, METH_VARARGS}, + {"parse_tuple_and_keywords", parse_tuple_and_keywords, METH_VARARGS}, + {"test_L_code", test_L_code, METH_NOARGS}, + {"test_empty_argparse", test_empty_argparse, METH_NOARGS}, + {"test_k_code", test_k_code, METH_NOARGS}, + {"test_s_code", test_s_code, METH_NOARGS}, + {NULL}, +}; + +int +_PyTestCapi_Init_GetArgs(PyObject *mod) +{ + if (PyModule_AddFunctions(mod, test_methods) < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/_testcapi/long.c b/Modules/_testcapi/long.c new file mode 100644 index 00000000000000..1be8de5e576254 --- /dev/null +++ b/Modules/_testcapi/long.c @@ -0,0 +1,557 @@ +#include "parts.h" + + +static PyObject * +raiseTestError(const char* test_name, const char* msg) +{ + PyErr_Format(PyExc_AssertionError, "%s: %s", test_name, msg); + return NULL; +} + +/* Tests of PyLong_{As, From}{Unsigned,}Long(), and + PyLong_{As, From}{Unsigned,}LongLong(). + + Note that the meat of the test is contained in testcapi_long.h. + This is revolting, but delicate code duplication is worse: "almost + exactly the same" code is needed to test long long, but the ubiquitous + dependence on type names makes it impossible to use a parameterized + function. A giant macro would be even worse than this. A C++ template + would be perfect. + + The "report an error" functions are deliberately not part of the #include + file: if the test fails, you can set a breakpoint in the appropriate + error function directly, and crawl back from there in the debugger. +*/ + +#define UNBIND(X) Py_DECREF(X); (X) = NULL + +static PyObject * +raise_test_long_error(const char* msg) +{ + return raiseTestError("test_long_api", msg); +} + +#define TESTNAME test_long_api_inner +#define TYPENAME long +#define F_S_TO_PY PyLong_FromLong +#define F_PY_TO_S PyLong_AsLong +#define F_U_TO_PY PyLong_FromUnsignedLong +#define F_PY_TO_U PyLong_AsUnsignedLong + +#include "testcapi_long.h" + +static PyObject * +test_long_api(PyObject* self, PyObject *Py_UNUSED(ignored)) +{ + return TESTNAME(raise_test_long_error); +} + +#undef TESTNAME +#undef TYPENAME +#undef F_S_TO_PY +#undef F_PY_TO_S +#undef F_U_TO_PY +#undef F_PY_TO_U + +static PyObject * +raise_test_longlong_error(const char* msg) +{ + return raiseTestError("test_longlong_api", msg); +} + +#define TESTNAME test_longlong_api_inner +#define TYPENAME long long +#define F_S_TO_PY PyLong_FromLongLong +#define F_PY_TO_S PyLong_AsLongLong +#define F_U_TO_PY PyLong_FromUnsignedLongLong +#define F_PY_TO_U PyLong_AsUnsignedLongLong + +#include "testcapi_long.h" + +static PyObject * +test_longlong_api(PyObject* self, PyObject *args) +{ + return TESTNAME(raise_test_longlong_error); +} + +#undef TESTNAME +#undef TYPENAME +#undef F_S_TO_PY +#undef F_PY_TO_S +#undef F_U_TO_PY +#undef F_PY_TO_U + +/* Test the PyLong_AsLongAndOverflow API. General conversion to PY_LONG + is tested by test_long_api_inner. This test will concentrate on proper + handling of overflow. 
+*/ + +static PyObject * +test_long_and_overflow(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + PyObject *num, *one, *temp; + long value; + int overflow; + + /* Test that overflow is set properly for a large value. */ + /* num is a number larger than LONG_MAX even on 64-bit platforms */ + num = PyLong_FromString("FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_and_overflow", + "return value was not set to -1"); + if (overflow != 1) + return raiseTestError("test_long_and_overflow", + "overflow was not set to 1"); + + /* Same again, with num = LONG_MAX + 1 */ + num = PyLong_FromLong(LONG_MAX); + if (num == NULL) + return NULL; + one = PyLong_FromLong(1L); + if (one == NULL) { + Py_DECREF(num); + return NULL; + } + temp = PyNumber_Add(num, one); + Py_DECREF(one); + Py_SETREF(num, temp); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_and_overflow", + "return value was not set to -1"); + if (overflow != 1) + return raiseTestError("test_long_and_overflow", + "overflow was not set to 1"); + + /* Test that overflow is set properly for a large negative value. */ + /* num is a number smaller than LONG_MIN even on 64-bit platforms */ + num = PyLong_FromString("-FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_and_overflow", + "return value was not set to -1"); + if (overflow != -1) + return raiseTestError("test_long_and_overflow", + "overflow was not set to -1"); + + /* Same again, with num = LONG_MIN - 1 */ + num = PyLong_FromLong(LONG_MIN); + if (num == NULL) + return NULL; + one = PyLong_FromLong(1L); + if (one == NULL) { + Py_DECREF(num); + return NULL; + } + temp = PyNumber_Subtract(num, one); + Py_DECREF(one); + Py_SETREF(num, temp); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_and_overflow", + "return value was not set to -1"); + if (overflow != -1) + return raiseTestError("test_long_and_overflow", + "overflow was not set to -1"); + + /* Test that overflow is cleared properly for small values. 
*/ + num = PyLong_FromString("FF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != 0xFF) + return raiseTestError("test_long_and_overflow", + "expected return value 0xFF"); + if (overflow != 0) + return raiseTestError("test_long_and_overflow", + "overflow was not cleared"); + + num = PyLong_FromString("-FF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -0xFF) + return raiseTestError("test_long_and_overflow", + "expected return value 0xFF"); + if (overflow != 0) + return raiseTestError("test_long_and_overflow", + "overflow was set incorrectly"); + + num = PyLong_FromLong(LONG_MAX); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != LONG_MAX) + return raiseTestError("test_long_and_overflow", + "expected return value LONG_MAX"); + if (overflow != 0) + return raiseTestError("test_long_and_overflow", + "overflow was not cleared"); + + num = PyLong_FromLong(LONG_MIN); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != LONG_MIN) + return raiseTestError("test_long_and_overflow", + "expected return value LONG_MIN"); + if (overflow != 0) + return raiseTestError("test_long_and_overflow", + "overflow was not cleared"); + + Py_RETURN_NONE; +} + +/* Test the PyLong_AsLongLongAndOverflow API. General conversion to + long long is tested by test_long_api_inner. This test will + concentrate on proper handling of overflow. +*/ + +static PyObject * +test_long_long_and_overflow(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + PyObject *num, *one, *temp; + long long value; + int overflow; + + /* Test that overflow is set properly for a large value. */ + /* num is a number larger than LLONG_MAX on a typical machine. */ + num = PyLong_FromString("FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_long_and_overflow", + "return value was not set to -1"); + if (overflow != 1) + return raiseTestError("test_long_long_and_overflow", + "overflow was not set to 1"); + + /* Same again, with num = LLONG_MAX + 1 */ + num = PyLong_FromLongLong(LLONG_MAX); + if (num == NULL) + return NULL; + one = PyLong_FromLong(1L); + if (one == NULL) { + Py_DECREF(num); + return NULL; + } + temp = PyNumber_Add(num, one); + Py_DECREF(one); + Py_SETREF(num, temp); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_long_and_overflow", + "return value was not set to -1"); + if (overflow != 1) + return raiseTestError("test_long_long_and_overflow", + "overflow was not set to 1"); + + /* Test that overflow is set properly for a large negative value. 
*/ + /* num is a number smaller than LLONG_MIN on a typical platform */ + num = PyLong_FromString("-FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_long_and_overflow", + "return value was not set to -1"); + if (overflow != -1) + return raiseTestError("test_long_long_and_overflow", + "overflow was not set to -1"); + + /* Same again, with num = LLONG_MIN - 1 */ + num = PyLong_FromLongLong(LLONG_MIN); + if (num == NULL) + return NULL; + one = PyLong_FromLong(1L); + if (one == NULL) { + Py_DECREF(num); + return NULL; + } + temp = PyNumber_Subtract(num, one); + Py_DECREF(one); + Py_SETREF(num, temp); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_long_and_overflow", + "return value was not set to -1"); + if (overflow != -1) + return raiseTestError("test_long_long_and_overflow", + "overflow was not set to -1"); + + /* Test that overflow is cleared properly for small values. */ + num = PyLong_FromString("FF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != 0xFF) + return raiseTestError("test_long_long_and_overflow", + "expected return value 0xFF"); + if (overflow != 0) + return raiseTestError("test_long_long_and_overflow", + "overflow was not cleared"); + + num = PyLong_FromString("-FF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -0xFF) + return raiseTestError("test_long_long_and_overflow", + "expected return value 0xFF"); + if (overflow != 0) + return raiseTestError("test_long_long_and_overflow", + "overflow was set incorrectly"); + + num = PyLong_FromLongLong(LLONG_MAX); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != LLONG_MAX) + return raiseTestError("test_long_long_and_overflow", + "expected return value LLONG_MAX"); + if (overflow != 0) + return raiseTestError("test_long_long_and_overflow", + "overflow was not cleared"); + + num = PyLong_FromLongLong(LLONG_MIN); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != LLONG_MIN) + return raiseTestError("test_long_long_and_overflow", + "expected return value LLONG_MIN"); + if (overflow != 0) + return raiseTestError("test_long_long_and_overflow", + "overflow was not cleared"); + + Py_RETURN_NONE; +} + +/* Test the PyLong_As{Size,Ssize}_t API. At present this just tests that + non-integer arguments are handled correctly. It should be extended to + test overflow handling. 
+ */ + +static PyObject * +test_long_as_size_t(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + size_t out_u; + Py_ssize_t out_s; + + Py_INCREF(Py_None); + + out_u = PyLong_AsSize_t(Py_None); + if (out_u != (size_t)-1 || !PyErr_Occurred()) + return raiseTestError("test_long_as_size_t", + "PyLong_AsSize_t(None) didn't complain"); + if (!PyErr_ExceptionMatches(PyExc_TypeError)) + return raiseTestError("test_long_as_size_t", + "PyLong_AsSize_t(None) raised " + "something other than TypeError"); + PyErr_Clear(); + + out_s = PyLong_AsSsize_t(Py_None); + if (out_s != (Py_ssize_t)-1 || !PyErr_Occurred()) + return raiseTestError("test_long_as_size_t", + "PyLong_AsSsize_t(None) didn't complain"); + if (!PyErr_ExceptionMatches(PyExc_TypeError)) + return raiseTestError("test_long_as_size_t", + "PyLong_AsSsize_t(None) raised " + "something other than TypeError"); + PyErr_Clear(); + + /* Py_INCREF(Py_None) omitted - we already have a reference to it. */ + return Py_None; +} + +static PyObject * +test_long_as_unsigned_long_long_mask(PyObject *self, + PyObject *Py_UNUSED(ignored)) +{ + unsigned long long res = PyLong_AsUnsignedLongLongMask(NULL); + + if (res != (unsigned long long)-1 || !PyErr_Occurred()) { + return raiseTestError("test_long_as_unsigned_long_long_mask", + "PyLong_AsUnsignedLongLongMask(NULL) didn't " + "complain"); + } + if (!PyErr_ExceptionMatches(PyExc_SystemError)) { + return raiseTestError("test_long_as_unsigned_long_long_mask", + "PyLong_AsUnsignedLongLongMask(NULL) raised " + "something other than SystemError"); + } + PyErr_Clear(); + Py_RETURN_NONE; +} + +/* Test the PyLong_AsDouble API. At present this just tests that + non-integer arguments are handled correctly. + */ + +static PyObject * +test_long_as_double(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + double out; + + Py_INCREF(Py_None); + + out = PyLong_AsDouble(Py_None); + if (out != -1.0 || !PyErr_Occurred()) + return raiseTestError("test_long_as_double", + "PyLong_AsDouble(None) didn't complain"); + if (!PyErr_ExceptionMatches(PyExc_TypeError)) + return raiseTestError("test_long_as_double", + "PyLong_AsDouble(None) raised " + "something other than TypeError"); + PyErr_Clear(); + + /* Py_INCREF(Py_None) omitted - we already have a reference to it. */ + return Py_None; +} + +/* Simple test of _PyLong_NumBits and _PyLong_Sign. 
*/ +static PyObject * +test_long_numbits(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + struct triple { + long input; + size_t nbits; + int sign; + } testcases[] = {{0, 0, 0}, + {1L, 1, 1}, + {-1L, 1, -1}, + {2L, 2, 1}, + {-2L, 2, -1}, + {3L, 2, 1}, + {-3L, 2, -1}, + {4L, 3, 1}, + {-4L, 3, -1}, + {0x7fffL, 15, 1}, /* one Python int digit */ + {-0x7fffL, 15, -1}, + {0xffffL, 16, 1}, + {-0xffffL, 16, -1}, + {0xfffffffL, 28, 1}, + {-0xfffffffL, 28, -1}}; + size_t i; + + for (i = 0; i < Py_ARRAY_LENGTH(testcases); ++i) { + size_t nbits; + int sign; + PyObject *plong; + + plong = PyLong_FromLong(testcases[i].input); + if (plong == NULL) + return NULL; + nbits = _PyLong_NumBits(plong); + sign = _PyLong_Sign(plong); + + Py_DECREF(plong); + if (nbits != testcases[i].nbits) + return raiseTestError("test_long_numbits", + "wrong result for _PyLong_NumBits"); + if (sign != testcases[i].sign) + return raiseTestError("test_long_numbits", + "wrong result for _PyLong_Sign"); + } + Py_RETURN_NONE; +} + +static PyMethodDef test_methods[] = { + {"test_long_and_overflow", test_long_and_overflow, METH_NOARGS}, + {"test_long_api", test_long_api, METH_NOARGS}, + {"test_long_as_double", test_long_as_double, METH_NOARGS}, + {"test_long_as_size_t", test_long_as_size_t, METH_NOARGS}, + {"test_long_as_unsigned_long_long_mask", test_long_as_unsigned_long_long_mask, METH_NOARGS}, + {"test_long_long_and_overflow",test_long_long_and_overflow, METH_NOARGS}, + {"test_long_numbits", test_long_numbits, METH_NOARGS}, + {"test_longlong_api", test_longlong_api, METH_NOARGS}, + {NULL}, +}; + +int +_PyTestCapi_Init_Long(PyObject *mod) +{ + if (PyModule_AddFunctions(mod, test_methods) < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/_testcapi/mem.c b/Modules/_testcapi/mem.c new file mode 100644 index 00000000000000..ef723bf0658533 --- /dev/null +++ b/Modules/_testcapi/mem.c @@ -0,0 +1,643 @@ +#include "parts.h" + +#include <stddef.h> + + +typedef struct { + PyMemAllocatorEx alloc; + + size_t malloc_size; + size_t calloc_nelem; + size_t calloc_elsize; + void *realloc_ptr; + size_t realloc_new_size; + void *free_ptr; + void *ctx; +} alloc_hook_t; + +static void * +hook_malloc(void *ctx, size_t size) +{ + alloc_hook_t *hook = (alloc_hook_t *)ctx; + hook->ctx = ctx; + hook->malloc_size = size; + return hook->alloc.malloc(hook->alloc.ctx, size); +} + +static void * +hook_calloc(void *ctx, size_t nelem, size_t elsize) +{ + alloc_hook_t *hook = (alloc_hook_t *)ctx; + hook->ctx = ctx; + hook->calloc_nelem = nelem; + hook->calloc_elsize = elsize; + return hook->alloc.calloc(hook->alloc.ctx, nelem, elsize); +} + +static void * +hook_realloc(void *ctx, void *ptr, size_t new_size) +{ + alloc_hook_t *hook = (alloc_hook_t *)ctx; + hook->ctx = ctx; + hook->realloc_ptr = ptr; + hook->realloc_new_size = new_size; + return hook->alloc.realloc(hook->alloc.ctx, ptr, new_size); +} + +static void +hook_free(void *ctx, void *ptr) +{ + alloc_hook_t *hook = (alloc_hook_t *)ctx; + hook->ctx = ctx; + hook->free_ptr = ptr; + hook->alloc.free(hook->alloc.ctx, ptr); +} + +/* Most part of the following code is inherited from the pyfailmalloc project + * written by Victor Stinner. 
*/ +static struct { + int installed; + PyMemAllocatorEx raw; + PyMemAllocatorEx mem; + PyMemAllocatorEx obj; +} FmHook; + +static struct { + int start; + int stop; + Py_ssize_t count; +} FmData; + +static int +fm_nomemory(void) +{ + FmData.count++; + if (FmData.count > FmData.start && + (FmData.stop <= 0 || FmData.count <= FmData.stop)) + { + return 1; + } + return 0; +} + +static void * +hook_fmalloc(void *ctx, size_t size) +{ + PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; + if (fm_nomemory()) { + return NULL; + } + return alloc->malloc(alloc->ctx, size); +} + +static void * +hook_fcalloc(void *ctx, size_t nelem, size_t elsize) +{ + PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; + if (fm_nomemory()) { + return NULL; + } + return alloc->calloc(alloc->ctx, nelem, elsize); +} + +static void * +hook_frealloc(void *ctx, void *ptr, size_t new_size) +{ + PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; + if (fm_nomemory()) { + return NULL; + } + return alloc->realloc(alloc->ctx, ptr, new_size); +} + +static void +hook_ffree(void *ctx, void *ptr) +{ + PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; + alloc->free(alloc->ctx, ptr); +} + +static void +fm_setup_hooks(void) +{ + if (FmHook.installed) { + return; + } + FmHook.installed = 1; + + PyMemAllocatorEx alloc; + alloc.malloc = hook_fmalloc; + alloc.calloc = hook_fcalloc; + alloc.realloc = hook_frealloc; + alloc.free = hook_ffree; + PyMem_GetAllocator(PYMEM_DOMAIN_RAW, &FmHook.raw); + PyMem_GetAllocator(PYMEM_DOMAIN_MEM, &FmHook.mem); + PyMem_GetAllocator(PYMEM_DOMAIN_OBJ, &FmHook.obj); + + alloc.ctx = &FmHook.raw; + PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &alloc); + + alloc.ctx = &FmHook.mem; + PyMem_SetAllocator(PYMEM_DOMAIN_MEM, &alloc); + + alloc.ctx = &FmHook.obj; + PyMem_SetAllocator(PYMEM_DOMAIN_OBJ, &alloc); +} + +static void +fm_remove_hooks(void) +{ + if (FmHook.installed) { + FmHook.installed = 0; + PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &FmHook.raw); + PyMem_SetAllocator(PYMEM_DOMAIN_MEM, &FmHook.mem); + PyMem_SetAllocator(PYMEM_DOMAIN_OBJ, &FmHook.obj); + } +} + +static PyObject * +set_nomemory(PyObject *self, PyObject *args) +{ + /* Memory allocation fails after 'start' allocation requests, and until + * 'stop' allocation requests except when 'stop' is negative or equal + * to 0 (default) in which case allocation failures never stop. 
*/ + FmData.count = 0; + FmData.stop = 0; + if (!PyArg_ParseTuple(args, "i|i", &FmData.start, &FmData.stop)) { + return NULL; + } + fm_setup_hooks(); + Py_RETURN_NONE; +} + +static PyObject * +remove_mem_hooks(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + fm_remove_hooks(); + Py_RETURN_NONE; +} + +static PyObject * +test_setallocators(PyMemAllocatorDomain domain) +{ + PyObject *res = NULL; + const char *error_msg; + alloc_hook_t hook; + + memset(&hook, 0, sizeof(hook)); + + PyMemAllocatorEx alloc; + alloc.ctx = &hook; + alloc.malloc = &hook_malloc; + alloc.calloc = &hook_calloc; + alloc.realloc = &hook_realloc; + alloc.free = &hook_free; + PyMem_GetAllocator(domain, &hook.alloc); + PyMem_SetAllocator(domain, &alloc); + + /* malloc, realloc, free */ + size_t size = 42; + hook.ctx = NULL; + void *ptr; + switch(domain) { + case PYMEM_DOMAIN_RAW: + ptr = PyMem_RawMalloc(size); + break; + case PYMEM_DOMAIN_MEM: + ptr = PyMem_Malloc(size); + break; + case PYMEM_DOMAIN_OBJ: + ptr = PyObject_Malloc(size); + break; + default: + ptr = NULL; + break; + } + +#define CHECK_CTX(FUNC) \ + if (hook.ctx != &hook) { \ + error_msg = FUNC " wrong context"; \ + goto fail; \ + } \ + hook.ctx = NULL; /* reset for next check */ + + if (ptr == NULL) { + error_msg = "malloc failed"; + goto fail; + } + CHECK_CTX("malloc"); + if (hook.malloc_size != size) { + error_msg = "malloc invalid size"; + goto fail; + } + + size_t size2 = 200; + void *ptr2; + switch(domain) { + case PYMEM_DOMAIN_RAW: + ptr2 = PyMem_RawRealloc(ptr, size2); + break; + case PYMEM_DOMAIN_MEM: + ptr2 = PyMem_Realloc(ptr, size2); + break; + case PYMEM_DOMAIN_OBJ: + ptr2 = PyObject_Realloc(ptr, size2); + break; + default: + ptr2 = NULL; + break; + } + + if (ptr2 == NULL) { + error_msg = "realloc failed"; + goto fail; + } + CHECK_CTX("realloc"); + if (hook.realloc_ptr != ptr || hook.realloc_new_size != size2) { + error_msg = "realloc invalid parameters"; + goto fail; + } + + switch(domain) { + case PYMEM_DOMAIN_RAW: + PyMem_RawFree(ptr2); + break; + case PYMEM_DOMAIN_MEM: + PyMem_Free(ptr2); + break; + case PYMEM_DOMAIN_OBJ: + PyObject_Free(ptr2); + break; + } + + CHECK_CTX("free"); + if (hook.free_ptr != ptr2) { + error_msg = "free invalid pointer"; + goto fail; + } + + /* calloc, free */ + size_t nelem = 2; + size_t elsize = 5; + switch(domain) { + case PYMEM_DOMAIN_RAW: + ptr = PyMem_RawCalloc(nelem, elsize); + break; + case PYMEM_DOMAIN_MEM: + ptr = PyMem_Calloc(nelem, elsize); + break; + case PYMEM_DOMAIN_OBJ: + ptr = PyObject_Calloc(nelem, elsize); + break; + default: + ptr = NULL; + break; + } + + if (ptr == NULL) { + error_msg = "calloc failed"; + goto fail; + } + CHECK_CTX("calloc"); + if (hook.calloc_nelem != nelem || hook.calloc_elsize != elsize) { + error_msg = "calloc invalid nelem or elsize"; + goto fail; + } + + hook.free_ptr = NULL; + switch(domain) { + case PYMEM_DOMAIN_RAW: + PyMem_RawFree(ptr); + break; + case PYMEM_DOMAIN_MEM: + PyMem_Free(ptr); + break; + case PYMEM_DOMAIN_OBJ: + PyObject_Free(ptr); + break; + } + + CHECK_CTX("calloc free"); + if (hook.free_ptr != ptr) { + error_msg = "calloc free invalid pointer"; + goto fail; + } + + res = Py_NewRef(Py_None); + goto finally; + +fail: + PyErr_SetString(PyExc_RuntimeError, error_msg); + +finally: + PyMem_SetAllocator(domain, &hook.alloc); + return res; + +#undef CHECK_CTX +} + +static PyObject * +test_pyobject_setallocators(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + return test_setallocators(PYMEM_DOMAIN_OBJ); +} + +static PyObject * +test_pyobject_new(PyObject *self, 
PyObject *Py_UNUSED(ignored)) +{ + PyObject *obj; + PyTypeObject *type = &PyBaseObject_Type; + PyTypeObject *var_type = &PyLong_Type; + + // PyObject_New() + obj = PyObject_New(PyObject, type); + if (obj == NULL) { + goto alloc_failed; + } + Py_DECREF(obj); + + // PyObject_NEW() + obj = PyObject_NEW(PyObject, type); + if (obj == NULL) { + goto alloc_failed; + } + Py_DECREF(obj); + + // PyObject_NewVar() + obj = PyObject_NewVar(PyObject, var_type, 3); + if (obj == NULL) { + goto alloc_failed; + } + Py_DECREF(obj); + + // PyObject_NEW_VAR() + obj = PyObject_NEW_VAR(PyObject, var_type, 3); + if (obj == NULL) { + goto alloc_failed; + } + Py_DECREF(obj); + + Py_RETURN_NONE; + +alloc_failed: + PyErr_NoMemory(); + return NULL; +} + +static PyObject * +test_pymem_alloc0(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + void *ptr; + + ptr = PyMem_RawMalloc(0); + if (ptr == NULL) { + PyErr_SetString(PyExc_RuntimeError, + "PyMem_RawMalloc(0) returns NULL"); + return NULL; + } + PyMem_RawFree(ptr); + + ptr = PyMem_RawCalloc(0, 0); + if (ptr == NULL) { + PyErr_SetString(PyExc_RuntimeError, + "PyMem_RawCalloc(0, 0) returns NULL"); + return NULL; + } + PyMem_RawFree(ptr); + + ptr = PyMem_Malloc(0); + if (ptr == NULL) { + PyErr_SetString(PyExc_RuntimeError, + "PyMem_Malloc(0) returns NULL"); + return NULL; + } + PyMem_Free(ptr); + + ptr = PyMem_Calloc(0, 0); + if (ptr == NULL) { + PyErr_SetString(PyExc_RuntimeError, + "PyMem_Calloc(0, 0) returns NULL"); + return NULL; + } + PyMem_Free(ptr); + + ptr = PyObject_Malloc(0); + if (ptr == NULL) { + PyErr_SetString(PyExc_RuntimeError, + "PyObject_Malloc(0) returns NULL"); + return NULL; + } + PyObject_Free(ptr); + + ptr = PyObject_Calloc(0, 0); + if (ptr == NULL) { + PyErr_SetString(PyExc_RuntimeError, + "PyObject_Calloc(0, 0) returns NULL"); + return NULL; + } + PyObject_Free(ptr); + + Py_RETURN_NONE; +} + +static PyObject * +test_pymem_getallocatorsname(PyObject *self, PyObject *args) +{ + const char *name = _PyMem_GetCurrentAllocatorName(); + if (name == NULL) { + PyErr_SetString(PyExc_RuntimeError, "cannot get allocators name"); + return NULL; + } + return PyUnicode_FromString(name); +} + +static PyObject * +test_pymem_setrawallocators(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + return test_setallocators(PYMEM_DOMAIN_RAW); +} + +static PyObject * +test_pymem_setallocators(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + return test_setallocators(PYMEM_DOMAIN_MEM); +} + +static PyObject * +pyobject_malloc_without_gil(PyObject *self, PyObject *args) +{ + char *buffer; + + /* Deliberate bug to test debug hooks on Python memory allocators: + call PyObject_Malloc() without holding the GIL */ + Py_BEGIN_ALLOW_THREADS + buffer = PyObject_Malloc(10); + Py_END_ALLOW_THREADS + + PyObject_Free(buffer); + + Py_RETURN_NONE; +} + +static PyObject * +pymem_buffer_overflow(PyObject *self, PyObject *args) +{ + char *buffer; + + /* Deliberate buffer overflow to check that PyMem_Free() detects + the overflow when debug hooks are installed. */ + buffer = PyMem_Malloc(16); + if (buffer == NULL) { + PyErr_NoMemory(); + return NULL; + } + buffer[16] = 'x'; + PyMem_Free(buffer); + + Py_RETURN_NONE; +} + +static PyObject * +pymem_api_misuse(PyObject *self, PyObject *args) +{ + char *buffer; + + /* Deliberate misusage of Python allocators: + allococate with PyMem but release with PyMem_Raw. 
*/ + buffer = PyMem_Malloc(16); + PyMem_RawFree(buffer); + + Py_RETURN_NONE; +} + +static PyObject * +pymem_malloc_without_gil(PyObject *self, PyObject *args) +{ + char *buffer; + + /* Deliberate bug to test debug hooks on Python memory allocators: + call PyMem_Malloc() without holding the GIL */ + Py_BEGIN_ALLOW_THREADS + buffer = PyMem_Malloc(10); + Py_END_ALLOW_THREADS + + PyMem_Free(buffer); + + Py_RETURN_NONE; +} + +static PyObject * +test_pyobject_is_freed(const char *test_name, PyObject *op) +{ + if (!_PyObject_IsFreed(op)) { + PyErr_SetString(PyExc_AssertionError, + "object is not seen as freed"); + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +check_pyobject_null_is_freed(PyObject *self, PyObject *Py_UNUSED(args)) +{ + PyObject *op = NULL; + return test_pyobject_is_freed("check_pyobject_null_is_freed", op); +} + + +static PyObject * +check_pyobject_uninitialized_is_freed(PyObject *self, + PyObject *Py_UNUSED(args)) +{ + PyObject *op = (PyObject *)PyObject_Malloc(sizeof(PyObject)); + if (op == NULL) { + return NULL; + } + /* Initialize reference count to avoid early crash in ceval or GC */ + Py_SET_REFCNT(op, 1); + /* object fields like ob_type are uninitialized! */ + return test_pyobject_is_freed("check_pyobject_uninitialized_is_freed", op); +} + + +static PyObject * +check_pyobject_forbidden_bytes_is_freed(PyObject *self, + PyObject *Py_UNUSED(args)) +{ + /* Allocate an incomplete PyObject structure: truncate 'ob_type' field */ + PyObject *op = (PyObject *)PyObject_Malloc(offsetof(PyObject, ob_type)); + if (op == NULL) { + return NULL; + } + /* Initialize reference count to avoid early crash in ceval or GC */ + Py_SET_REFCNT(op, 1); + /* ob_type field is after the memory block: part of "forbidden bytes" + when using debug hooks on memory allocators! */ + return test_pyobject_is_freed("check_pyobject_forbidden_bytes_is_freed", op); +} + + +static PyObject * +check_pyobject_freed_is_freed(PyObject *self, PyObject *Py_UNUSED(args)) +{ + /* This test would fail if run with the address sanitizer */ +#ifdef _Py_ADDRESS_SANITIZER + Py_RETURN_NONE; +#else + PyObject *op = PyObject_CallNoArgs((PyObject *)&PyBaseObject_Type); + if (op == NULL) { + return NULL; + } + Py_TYPE(op)->tp_dealloc(op); + /* Reset reference count to avoid early crash in ceval or GC */ + Py_SET_REFCNT(op, 1); + /* object memory is freed! 
*/ + return test_pyobject_is_freed("check_pyobject_freed_is_freed", op); +#endif +} + +static PyMethodDef test_methods[] = { + {"check_pyobject_forbidden_bytes_is_freed", + check_pyobject_forbidden_bytes_is_freed, METH_NOARGS}, + {"check_pyobject_freed_is_freed", check_pyobject_freed_is_freed, METH_NOARGS}, + {"check_pyobject_null_is_freed", check_pyobject_null_is_freed, METH_NOARGS}, + {"check_pyobject_uninitialized_is_freed", + check_pyobject_uninitialized_is_freed, METH_NOARGS}, + {"pymem_api_misuse", pymem_api_misuse, METH_NOARGS}, + {"pymem_buffer_overflow", pymem_buffer_overflow, METH_NOARGS}, + {"pymem_getallocatorsname", test_pymem_getallocatorsname, METH_NOARGS}, + {"pymem_malloc_without_gil", pymem_malloc_without_gil, METH_NOARGS}, + {"pyobject_malloc_without_gil", pyobject_malloc_without_gil, METH_NOARGS}, + {"remove_mem_hooks", remove_mem_hooks, METH_NOARGS, + PyDoc_STR("Remove memory hooks.")}, + {"set_nomemory", (PyCFunction)set_nomemory, METH_VARARGS, + PyDoc_STR("set_nomemory(start:int, stop:int = 0)")}, + {"test_pymem_alloc0", test_pymem_alloc0, METH_NOARGS}, + {"test_pymem_setallocators", test_pymem_setallocators, METH_NOARGS}, + {"test_pymem_setrawallocators", test_pymem_setrawallocators, METH_NOARGS}, + {"test_pyobject_new", test_pyobject_new, METH_NOARGS}, + {"test_pyobject_setallocators", test_pyobject_setallocators, METH_NOARGS}, + {NULL}, +}; + +int +_PyTestCapi_Init_Mem(PyObject *mod) +{ + if (PyModule_AddFunctions(mod, test_methods) < 0) { + return -1; + } + + PyObject *v; +#ifdef WITH_PYMALLOC + v = Py_NewRef(Py_True); +#else + v = Py_NewRef(Py_False); +#endif + int rc = PyModule_AddObjectRef(mod, "WITH_PYMALLOC", v); + Py_DECREF(v); + if (rc < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/_testcapi/parts.h b/Modules/_testcapi/parts.h index 304e5922c0d50b..7ba3c4ebff8cde 100644 --- a/Modules/_testcapi/parts.h +++ b/Modules/_testcapi/parts.h @@ -27,6 +27,15 @@ int _PyTestCapi_Init_Vectorcall(PyObject *module); int _PyTestCapi_Init_Heaptype(PyObject *module); int _PyTestCapi_Init_Unicode(PyObject *module); +int _PyTestCapi_Init_GetArgs(PyObject *module); +int _PyTestCapi_Init_PyTime(PyObject *module); +int _PyTestCapi_Init_DateTime(PyObject *module); +int _PyTestCapi_Init_Docstring(PyObject *module); +int _PyTestCapi_Init_Mem(PyObject *module); +int _PyTestCapi_Init_Watchers(PyObject *module); +int _PyTestCapi_Init_Long(PyObject *module); +int _PyTestCapi_Init_Float(PyObject *module); +int _PyTestCapi_Init_Structmember(PyObject *module); #ifdef LIMITED_API_AVAILABLE int _PyTestCapi_Init_VectorcallLimited(PyObject *module); diff --git a/Modules/_testcapi/pytime.c b/Modules/_testcapi/pytime.c new file mode 100644 index 00000000000000..7422bafc30193a --- /dev/null +++ b/Modules/_testcapi/pytime.c @@ -0,0 +1,274 @@ +#include "parts.h" + +#ifdef MS_WINDOWS +# include <winsock2.h> // struct timeval +#endif + +static PyObject * +test_pytime_fromseconds(PyObject *self, PyObject *args) +{ + int seconds; + if (!PyArg_ParseTuple(args, "i", &seconds)) { + return NULL; + } + _PyTime_t ts = _PyTime_FromSeconds(seconds); + return _PyTime_AsNanosecondsObject(ts); +} + +static int +check_time_rounding(int round) +{ + if (round != _PyTime_ROUND_FLOOR + && round != _PyTime_ROUND_CEILING + && round != _PyTime_ROUND_HALF_EVEN + && round != _PyTime_ROUND_UP) + { + PyErr_SetString(PyExc_ValueError, "invalid rounding"); + return -1; + } + return 0; +} + +static PyObject * +test_pytime_fromsecondsobject(PyObject *self, PyObject *args) +{ + PyObject *obj; + int round; + if 
(!PyArg_ParseTuple(args, "Oi", &obj, &round)) { + return NULL; + } + if (check_time_rounding(round) < 0) { + return NULL; + } + _PyTime_t ts; + if (_PyTime_FromSecondsObject(&ts, obj, round) == -1) { + return NULL; + } + return _PyTime_AsNanosecondsObject(ts); +} + +static PyObject * +test_pytime_assecondsdouble(PyObject *self, PyObject *args) +{ + PyObject *obj; + if (!PyArg_ParseTuple(args, "O", &obj)) { + return NULL; + } + _PyTime_t ts; + if (_PyTime_FromNanosecondsObject(&ts, obj) < 0) { + return NULL; + } + double d = _PyTime_AsSecondsDouble(ts); + return PyFloat_FromDouble(d); +} + +static PyObject * +test_PyTime_AsTimeval(PyObject *self, PyObject *args) +{ + PyObject *obj; + int round; + if (!PyArg_ParseTuple(args, "Oi", &obj, &round)) { + return NULL; + } + if (check_time_rounding(round) < 0) { + return NULL; + } + _PyTime_t t; + if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { + return NULL; + } + struct timeval tv; + if (_PyTime_AsTimeval(t, &tv, round) < 0) { + return NULL; + } + + PyObject *seconds = PyLong_FromLongLong(tv.tv_sec); + if (seconds == NULL) { + return NULL; + } + return Py_BuildValue("Nl", seconds, (long)tv.tv_usec); +} + +static PyObject * +test_PyTime_AsTimeval_clamp(PyObject *self, PyObject *args) +{ + PyObject *obj; + int round; + if (!PyArg_ParseTuple(args, "Oi", &obj, &round)) { + return NULL; + } + if (check_time_rounding(round) < 0) { + return NULL; + } + _PyTime_t t; + if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { + return NULL; + } + struct timeval tv; + _PyTime_AsTimeval_clamp(t, &tv, round); + + PyObject *seconds = PyLong_FromLongLong(tv.tv_sec); + if (seconds == NULL) { + return NULL; + } + return Py_BuildValue("Nl", seconds, (long)tv.tv_usec); +} + +#ifdef HAVE_CLOCK_GETTIME +static PyObject * +test_PyTime_AsTimespec(PyObject *self, PyObject *args) +{ + PyObject *obj; + if (!PyArg_ParseTuple(args, "O", &obj)) { + return NULL; + } + _PyTime_t t; + if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { + return NULL; + } + struct timespec ts; + if (_PyTime_AsTimespec(t, &ts) == -1) { + return NULL; + } + return Py_BuildValue("Nl", _PyLong_FromTime_t(ts.tv_sec), ts.tv_nsec); +} + +static PyObject * +test_PyTime_AsTimespec_clamp(PyObject *self, PyObject *args) +{ + PyObject *obj; + if (!PyArg_ParseTuple(args, "O", &obj)) { + return NULL; + } + _PyTime_t t; + if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { + return NULL; + } + struct timespec ts; + _PyTime_AsTimespec_clamp(t, &ts); + return Py_BuildValue("Nl", _PyLong_FromTime_t(ts.tv_sec), ts.tv_nsec); +} +#endif + +static PyObject * +test_PyTime_AsMilliseconds(PyObject *self, PyObject *args) +{ + PyObject *obj; + int round; + if (!PyArg_ParseTuple(args, "Oi", &obj, &round)) { + return NULL; + } + _PyTime_t t; + if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { + return NULL; + } + if (check_time_rounding(round) < 0) { + return NULL; + } + _PyTime_t ms = _PyTime_AsMilliseconds(t, round); + _PyTime_t ns = _PyTime_FromNanoseconds(ms); + return _PyTime_AsNanosecondsObject(ns); +} + +static PyObject * +test_PyTime_AsMicroseconds(PyObject *self, PyObject *args) +{ + PyObject *obj; + int round; + if (!PyArg_ParseTuple(args, "Oi", &obj, &round)) { + return NULL; + } + _PyTime_t t; + if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { + return NULL; + } + if (check_time_rounding(round) < 0) { + return NULL; + } + _PyTime_t us = _PyTime_AsMicroseconds(t, round); + _PyTime_t ns = _PyTime_FromNanoseconds(us); + return _PyTime_AsNanosecondsObject(ns); +} + +static PyObject * +test_pytime_object_to_time_t(PyObject 
*self, PyObject *args) +{ + PyObject *obj; + time_t sec; + int round; + if (!PyArg_ParseTuple(args, "Oi:pytime_object_to_time_t", &obj, &round)) { + return NULL; + } + if (check_time_rounding(round) < 0) { + return NULL; + } + if (_PyTime_ObjectToTime_t(obj, &sec, round) == -1) { + return NULL; + } + return _PyLong_FromTime_t(sec); +} + +static PyObject * +test_pytime_object_to_timeval(PyObject *self, PyObject *args) +{ + PyObject *obj; + time_t sec; + long usec; + int round; + if (!PyArg_ParseTuple(args, "Oi:pytime_object_to_timeval", &obj, &round)) { + return NULL; + } + if (check_time_rounding(round) < 0) { + return NULL; + } + if (_PyTime_ObjectToTimeval(obj, &sec, &usec, round) == -1) { + return NULL; + } + return Py_BuildValue("Nl", _PyLong_FromTime_t(sec), usec); +} + +static PyObject * +test_pytime_object_to_timespec(PyObject *self, PyObject *args) +{ + PyObject *obj; + time_t sec; + long nsec; + int round; + if (!PyArg_ParseTuple(args, "Oi:pytime_object_to_timespec", &obj, &round)) { + return NULL; + } + if (check_time_rounding(round) < 0) { + return NULL; + } + if (_PyTime_ObjectToTimespec(obj, &sec, &nsec, round) == -1) { + return NULL; + } + return Py_BuildValue("Nl", _PyLong_FromTime_t(sec), nsec); +} + +static PyMethodDef test_methods[] = { + {"PyTime_AsMicroseconds", test_PyTime_AsMicroseconds, METH_VARARGS}, + {"PyTime_AsMilliseconds", test_PyTime_AsMilliseconds, METH_VARARGS}, + {"PyTime_AsSecondsDouble", test_pytime_assecondsdouble, METH_VARARGS}, +#ifdef HAVE_CLOCK_GETTIME + {"PyTime_AsTimespec", test_PyTime_AsTimespec, METH_VARARGS}, + {"PyTime_AsTimespec_clamp", test_PyTime_AsTimespec_clamp, METH_VARARGS}, +#endif + {"PyTime_AsTimeval", test_PyTime_AsTimeval, METH_VARARGS}, + {"PyTime_AsTimeval_clamp", test_PyTime_AsTimeval_clamp, METH_VARARGS}, + {"PyTime_FromSeconds", test_pytime_fromseconds, METH_VARARGS}, + {"PyTime_FromSecondsObject", test_pytime_fromsecondsobject, METH_VARARGS}, + {"pytime_object_to_time_t", test_pytime_object_to_time_t, METH_VARARGS}, + {"pytime_object_to_timespec", test_pytime_object_to_timespec, METH_VARARGS}, + {"pytime_object_to_timeval", test_pytime_object_to_timeval, METH_VARARGS}, + {NULL}, +}; + +int +_PyTestCapi_Init_PyTime(PyObject *mod) +{ + if (PyModule_AddFunctions(mod, test_methods) < 0) { + return -1; + } + return 0; +} diff --git a/Modules/_testcapi/structmember.c b/Modules/_testcapi/structmember.c new file mode 100644 index 00000000000000..0fb872a4328d60 --- /dev/null +++ b/Modules/_testcapi/structmember.c @@ -0,0 +1,217 @@ +#define PY_SSIZE_T_CLEAN +#include "parts.h" +#include <stddef.h> // for offsetof() + + +// This defines two classes that contain all the simple member types, one +// using "new" Py_-prefixed API, and the other using "old" <structmember.h>. +// They should behave identically in Python. 
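To make "behave identically in Python" concrete, here is a minimal usage sketch (illustrative only, not part of the patch), assuming the extension module is importable as _testcapi and exposes the two classes under the names used by the type definitions later in this file:

    # Hypothetical usage sketch; class names, member names, and the import
    # path are assumed from the definitions in this file.
    from _testcapi import (_test_structmembersType_OldAPI,
                           _test_structmembersType_NewAPI)

    for cls in (_test_structmembersType_OldAPI, _test_structmembersType_NewAPI):
        obj = cls(T_INT=-42, T_ULONG=7, T_STRING_INPLACE="abc")
        # Members declared with the old <structmember.h> constants and the new
        # Py_T_* constants should round-trip the same values.
        assert obj.T_INT == -42
        assert obj.T_ULONG == 7
        assert obj.T_STRING_INPLACE == "abc"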
+ +typedef struct { + char bool_member; + char byte_member; + unsigned char ubyte_member; + short short_member; + unsigned short ushort_member; + int int_member; + unsigned int uint_member; + long long_member; + unsigned long ulong_member; + Py_ssize_t pyssizet_member; + float float_member; + double double_member; + char inplace_member[6]; + long long longlong_member; + unsigned long long ulonglong_member; +} all_structmembers; + +typedef struct { + PyObject_HEAD + all_structmembers structmembers; +} test_structmembers; + + +static struct PyMemberDef test_members_newapi[] = { + {"T_BOOL", Py_T_BOOL, offsetof(test_structmembers, structmembers.bool_member), 0, NULL}, + {"T_BYTE", Py_T_BYTE, offsetof(test_structmembers, structmembers.byte_member), 0, NULL}, + {"T_UBYTE", Py_T_UBYTE, offsetof(test_structmembers, structmembers.ubyte_member), 0, NULL}, + {"T_SHORT", Py_T_SHORT, offsetof(test_structmembers, structmembers.short_member), 0, NULL}, + {"T_USHORT", Py_T_USHORT, offsetof(test_structmembers, structmembers.ushort_member), 0, NULL}, + {"T_INT", Py_T_INT, offsetof(test_structmembers, structmembers.int_member), 0, NULL}, + {"T_UINT", Py_T_UINT, offsetof(test_structmembers, structmembers.uint_member), 0, NULL}, + {"T_LONG", Py_T_LONG, offsetof(test_structmembers, structmembers.long_member), 0, NULL}, + {"T_ULONG", Py_T_ULONG, offsetof(test_structmembers, structmembers.ulong_member), 0, NULL}, + {"T_PYSSIZET", Py_T_PYSSIZET, offsetof(test_structmembers, structmembers.pyssizet_member), 0, NULL}, + {"T_FLOAT", Py_T_FLOAT, offsetof(test_structmembers, structmembers.float_member), 0, NULL}, + {"T_DOUBLE", Py_T_DOUBLE, offsetof(test_structmembers, structmembers.double_member), 0, NULL}, + {"T_STRING_INPLACE", Py_T_STRING_INPLACE, offsetof(test_structmembers, structmembers.inplace_member), 0, NULL}, + {"T_LONGLONG", Py_T_LONGLONG, offsetof(test_structmembers, structmembers.longlong_member), 0, NULL}, + {"T_ULONGLONG", Py_T_ULONGLONG, offsetof(test_structmembers, structmembers.ulonglong_member), 0, NULL}, + {NULL} +}; + +static PyObject * +test_structmembers_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) +{ + static char *keywords[] = { + "T_BOOL", "T_BYTE", "T_UBYTE", "T_SHORT", "T_USHORT", + "T_INT", "T_UINT", "T_LONG", "T_ULONG", "T_PYSSIZET", + "T_FLOAT", "T_DOUBLE", "T_STRING_INPLACE", + "T_LONGLONG", "T_ULONGLONG", + NULL}; + static const char fmt[] = "|bbBhHiIlknfds#LK"; + test_structmembers *ob; + const char *s = NULL; + Py_ssize_t string_len = 0; + ob = PyObject_New(test_structmembers, type); + if (ob == NULL) { + return NULL; + } + memset(&ob->structmembers, 0, sizeof(all_structmembers)); + if (!PyArg_ParseTupleAndKeywords(args, kwargs, fmt, keywords, + &ob->structmembers.bool_member, + &ob->structmembers.byte_member, + &ob->structmembers.ubyte_member, + &ob->structmembers.short_member, + &ob->structmembers.ushort_member, + &ob->structmembers.int_member, + &ob->structmembers.uint_member, + &ob->structmembers.long_member, + &ob->structmembers.ulong_member, + &ob->structmembers.pyssizet_member, + &ob->structmembers.float_member, + &ob->structmembers.double_member, + &s, &string_len, + &ob->structmembers.longlong_member, + &ob->structmembers.ulonglong_member)) + { + Py_DECREF(ob); + return NULL; + } + if (s != NULL) { + if (string_len > 5) { + Py_DECREF(ob); + PyErr_SetString(PyExc_ValueError, "string too long"); + return NULL; + } + strcpy(ob->structmembers.inplace_member, s); + } + else { + strcpy(ob->structmembers.inplace_member, ""); + } + return (PyObject *)ob; +} + +static 
PyType_Slot test_structmembers_slots[] = { + {Py_tp_new, test_structmembers_new}, + {Py_tp_members, test_members_newapi}, + {0}, +}; + +static PyType_Spec test_structmembers_spec = { + .name = "_testcapi._test_structmembersType_NewAPI", + .flags = Py_TPFLAGS_DEFAULT, + .basicsize = sizeof(test_structmembers), + .slots = test_structmembers_slots, +}; + +#include <structmember.h> + +static struct PyMemberDef test_members[] = { + {"T_BOOL", T_BOOL, offsetof(test_structmembers, structmembers.bool_member), 0, NULL}, + {"T_BYTE", T_BYTE, offsetof(test_structmembers, structmembers.byte_member), 0, NULL}, + {"T_UBYTE", T_UBYTE, offsetof(test_structmembers, structmembers.ubyte_member), 0, NULL}, + {"T_SHORT", T_SHORT, offsetof(test_structmembers, structmembers.short_member), 0, NULL}, + {"T_USHORT", T_USHORT, offsetof(test_structmembers, structmembers.ushort_member), 0, NULL}, + {"T_INT", T_INT, offsetof(test_structmembers, structmembers.int_member), 0, NULL}, + {"T_UINT", T_UINT, offsetof(test_structmembers, structmembers.uint_member), 0, NULL}, + {"T_LONG", T_LONG, offsetof(test_structmembers, structmembers.long_member), 0, NULL}, + {"T_ULONG", T_ULONG, offsetof(test_structmembers, structmembers.ulong_member), 0, NULL}, + {"T_PYSSIZET", T_PYSSIZET, offsetof(test_structmembers, structmembers.pyssizet_member), 0, NULL}, + {"T_FLOAT", T_FLOAT, offsetof(test_structmembers, structmembers.float_member), 0, NULL}, + {"T_DOUBLE", T_DOUBLE, offsetof(test_structmembers, structmembers.double_member), 0, NULL}, + {"T_STRING_INPLACE", T_STRING_INPLACE, offsetof(test_structmembers, structmembers.inplace_member), 0, NULL}, + {"T_LONGLONG", T_LONGLONG, offsetof(test_structmembers, structmembers.longlong_member), 0, NULL}, + {"T_ULONGLONG", T_ULONGLONG, offsetof(test_structmembers, structmembers.ulonglong_member), 0, NULL}, + {NULL} +}; + + +static void +test_structmembers_free(PyObject *ob) +{ + PyObject_Free(ob); +} + +/* Designated initializers would work too, but this does test the *old* API */ +static PyTypeObject test_structmembersType_OldAPI= { + PyVarObject_HEAD_INIT(NULL, 0) + "test_structmembersType_OldAPI", + sizeof(test_structmembers), /* tp_basicsize */ + 0, /* tp_itemsize */ + test_structmembers_free, /* destructor tp_dealloc */ + 0, /* tp_vectorcall_offset */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_as_async */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + PyObject_GenericGetAttr, /* tp_getattro */ + PyObject_GenericSetAttr, /* tp_setattro */ + 0, /* tp_as_buffer */ + 0, /* tp_flags */ + "Type containing all structmember types", + 0, /* traverseproc tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + 0, /* tp_methods */ + test_members, /* tp_members */ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + test_structmembers_new, /* tp_new */ +}; + + +int +_PyTestCapi_Init_Structmember(PyObject *m) +{ + int res; + res = PyType_Ready(&test_structmembersType_OldAPI); + if (res < 0) { + return -1; + } + res = PyModule_AddObject( + m, + "_test_structmembersType_OldAPI", + (PyObject *)&test_structmembersType_OldAPI); + if (res < 0) { + return -1; + } + + PyObject *test_structmembersType_NewAPI = PyType_FromModuleAndSpec( + m, &test_structmembers_spec, NULL); + if (!test_structmembersType_NewAPI) { + return -1; + } + res = PyModule_AddType(m, (PyTypeObject*)test_structmembersType_NewAPI); + 
Py_DECREF(test_structmembersType_NewAPI); + if (res < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/testcapi_long.h b/Modules/_testcapi/testcapi_long.h similarity index 99% rename from Modules/testcapi_long.h rename to Modules/_testcapi/testcapi_long.h index 6bddad7bb5d249..143258140b4bb4 100644 --- a/Modules/testcapi_long.h +++ b/Modules/_testcapi/testcapi_long.h @@ -202,6 +202,5 @@ TESTNAME(PyObject *error(const char*)) Py_DECREF(Py_None); } - Py_INCREF(Py_None); - return Py_None; + return Py_NewRef(Py_None); } diff --git a/Modules/_testcapi/unicode.c b/Modules/_testcapi/unicode.c index d5c4a9e5b95ec6..2d23993ce420b3 100644 --- a/Modules/_testcapi/unicode.c +++ b/Modules/_testcapi/unicode.c @@ -1,3 +1,4 @@ +#define PY_SSIZE_T_CLEAN #include "parts.h" static struct PyModuleDef *_testcapimodule = NULL; // set at initialization @@ -99,6 +100,17 @@ test_widechar(PyObject *self, PyObject *Py_UNUSED(ignored)) Py_RETURN_NONE; } +#define NULLABLE(x) do { if (x == Py_None) x = NULL; } while (0); + +/* Test PyUnicode_FromObject() */ +static PyObject * +unicode_fromobject(PyObject *self, PyObject *arg) +{ + NULLABLE(arg); + return PyUnicode_FromObject(arg); +} + +/* Test PyUnicode_AsWideChar() */ static PyObject * unicode_aswidechar(PyObject *self, PyObject *args) { @@ -130,6 +142,7 @@ unicode_aswidechar(PyObject *self, PyObject *args) return Py_BuildValue("(Nn)", result, size); } +/* Test PyUnicode_AsWideCharString() */ static PyObject * unicode_aswidecharstring(PyObject *self, PyObject *args) { @@ -151,6 +164,7 @@ unicode_aswidecharstring(PyObject *self, PyObject *args) return Py_BuildValue("(Nn)", result, size); } +/* Test PyUnicode_AsUCS4() */ static PyObject * unicode_asucs4(PyObject *self, PyObject *args) { @@ -181,6 +195,7 @@ unicode_asucs4(PyObject *self, PyObject *args) return result; } +/* Test PyUnicode_AsUTF8() */ static PyObject * unicode_asutf8(PyObject *self, PyObject *args) { @@ -199,6 +214,7 @@ unicode_asutf8(PyObject *self, PyObject *args) return PyBytes_FromString(buffer); } +/* Test PyUnicode_AsUTF8AndSize() */ static PyObject * unicode_asutf8andsize(PyObject *self, PyObject *args) { @@ -223,26 +239,228 @@ unicode_asutf8andsize(PyObject *self, PyObject *args) return Py_BuildValue("(Nn)", result, utf8_len); } +/* Test PyUnicode_DecodeUTF8() */ +static PyObject * +unicode_decodeutf8(PyObject *self, PyObject *args) +{ + const char *data; + Py_ssize_t size; + const char *errors = NULL; + + if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) + return NULL; + + return PyUnicode_DecodeUTF8(data, size, errors); +} + +/* Test PyUnicode_DecodeUTF8Stateful() */ +static PyObject * +unicode_decodeutf8stateful(PyObject *self, PyObject *args) +{ + const char *data; + Py_ssize_t size; + const char *errors = NULL; + Py_ssize_t consumed = 123456789; + PyObject *result; + + if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) + return NULL; + + result = PyUnicode_DecodeUTF8Stateful(data, size, errors, &consumed); + if (!result) { + return NULL; + } + return Py_BuildValue("(Nn)", result, consumed); +} + +/* Test PyUnicode_Concat() */ +static PyObject * +unicode_concat(PyObject *self, PyObject *args) +{ + PyObject *left; + PyObject *right; + + if (!PyArg_ParseTuple(args, "OO", &left, &right)) + return NULL; + + NULLABLE(left); + NULLABLE(right); + return PyUnicode_Concat(left, right); +} + +/* Test PyUnicode_Split() */ +static PyObject * +unicode_split(PyObject *self, PyObject *args) +{ + PyObject *s; + PyObject *sep; + Py_ssize_t maxsplit = -1; + + if 
(!PyArg_ParseTuple(args, "OO|n", &s, &sep, &maxsplit)) + return NULL; + + NULLABLE(s); + NULLABLE(sep); + return PyUnicode_Split(s, sep, maxsplit); +} + +/* Test PyUnicode_RSplit() */ +static PyObject * +unicode_rsplit(PyObject *self, PyObject *args) +{ + PyObject *s; + PyObject *sep; + Py_ssize_t maxsplit = -1; + + if (!PyArg_ParseTuple(args, "OO|n", &s, &sep, &maxsplit)) + return NULL; + + NULLABLE(s); + NULLABLE(sep); + return PyUnicode_RSplit(s, sep, maxsplit); +} + +/* Test PyUnicode_Splitlines() */ +static PyObject * +unicode_splitlines(PyObject *self, PyObject *args) +{ + PyObject *s; + int keepends = 0; + + if (!PyArg_ParseTuple(args, "O|i", &s, &keepends)) + return NULL; + + NULLABLE(s); + return PyUnicode_Splitlines(s, keepends); +} + +/* Test PyUnicode_Partition() */ +static PyObject * +unicode_partition(PyObject *self, PyObject *args) +{ + PyObject *s; + PyObject *sep; + + if (!PyArg_ParseTuple(args, "OO", &s, &sep)) + return NULL; + + NULLABLE(s); + NULLABLE(sep); + return PyUnicode_Partition(s, sep); +} + +/* Test PyUnicode_RPartition() */ +static PyObject * +unicode_rpartition(PyObject *self, PyObject *args) +{ + PyObject *s; + PyObject *sep; + + if (!PyArg_ParseTuple(args, "OO", &s, &sep)) + return NULL; + + NULLABLE(s); + NULLABLE(sep); + return PyUnicode_RPartition(s, sep); +} + +/* Test PyUnicode_Translate() */ +static PyObject * +unicode_translate(PyObject *self, PyObject *args) +{ + PyObject *obj; + PyObject *table; + const char *errors = NULL; + + if (!PyArg_ParseTuple(args, "OO|z", &obj, &table, &errors)) + return NULL; + + NULLABLE(obj); + NULLABLE(table); + return PyUnicode_Translate(obj, table, errors); +} + +/* Test PyUnicode_Join() */ +static PyObject * +unicode_join(PyObject *self, PyObject *args) +{ + PyObject *sep; + PyObject *seq; + + if (!PyArg_ParseTuple(args, "OO", &sep, &seq)) + return NULL; + + NULLABLE(sep); + NULLABLE(seq); + return PyUnicode_Join(sep, seq); +} + +/* Test PyUnicode_Count() */ static PyObject * unicode_count(PyObject *self, PyObject *args) { PyObject *str; PyObject *substr; + Py_ssize_t start; + Py_ssize_t end; Py_ssize_t result; - Py_ssize_t start, end; - if (!PyArg_ParseTuple(args, "UUnn:unicode_count", &str, &substr, - &start, &end)) { + if (!PyArg_ParseTuple(args, "OOnn", &str, &substr, &start, &end)) return NULL; - } + NULLABLE(str); + NULLABLE(substr); result = PyUnicode_Count(str, substr, start, end); if (result == -1) return NULL; - else - return PyLong_FromSsize_t(result); + return PyLong_FromSsize_t(result); +} + +/* Test PyUnicode_Find() */ +static PyObject * +unicode_find(PyObject *self, PyObject *args) +{ + PyObject *str; + PyObject *substr; + Py_ssize_t start; + Py_ssize_t end; + int direction; + Py_ssize_t result; + + if (!PyArg_ParseTuple(args, "OOnni", &str, &substr, &start, &end, &direction)) + return NULL; + + NULLABLE(str); + NULLABLE(substr); + result = PyUnicode_Find(str, substr, start, end, direction); + if (result == -2) + return NULL; + return PyLong_FromSsize_t(result); } +/* Test PyUnicode_Tailmatch() */ +static PyObject * +unicode_tailmatch(PyObject *self, PyObject *args) +{ + PyObject *str; + PyObject *substr; + Py_ssize_t start; + Py_ssize_t end; + int direction; + Py_ssize_t result; + + if (!PyArg_ParseTuple(args, "OOnni", &str, &substr, &start, &end, &direction)) + return NULL; + + NULLABLE(str); + NULLABLE(substr); + result = PyUnicode_Tailmatch(str, substr, start, end, direction); + if (result == -1) + return NULL; + return PyLong_FromSsize_t(result); +} + +/* Test PyUnicode_FindChar() */ static 
PyObject * unicode_findchar(PyObject *self, PyObject *args) { @@ -264,6 +482,130 @@ unicode_findchar(PyObject *self, PyObject *args) return PyLong_FromSsize_t(result); } +/* Test PyUnicode_Replace() */ +static PyObject * +unicode_replace(PyObject *self, PyObject *args) +{ + PyObject *str; + PyObject *substr; + PyObject *replstr; + Py_ssize_t maxcount = -1; + + if (!PyArg_ParseTuple(args, "OOO|n", &str, &substr, &replstr, &maxcount)) + return NULL; + + NULLABLE(str); + NULLABLE(substr); + NULLABLE(replstr); + return PyUnicode_Replace(str, substr, replstr, maxcount); +} + +/* Test PyUnicode_Compare() */ +static PyObject * +unicode_compare(PyObject *self, PyObject *args) +{ + PyObject *left; + PyObject *right; + int result; + + if (!PyArg_ParseTuple(args, "OO", &left, &right)) + return NULL; + + NULLABLE(left); + NULLABLE(right); + result = PyUnicode_Compare(left, right); + if (result == -1 && PyErr_Occurred()) { + return NULL; + } + return PyLong_FromLong(result); +} + +/* Test PyUnicode_CompareWithASCIIString() */ +static PyObject * +unicode_comparewithasciistring(PyObject *self, PyObject *args) +{ + PyObject *left; + const char *right = NULL; + Py_ssize_t right_len; + int result; + + if (!PyArg_ParseTuple(args, "O|y#", &left, &right, &right_len)) + return NULL; + + NULLABLE(left); + result = PyUnicode_CompareWithASCIIString(left, right); + if (result == -1 && PyErr_Occurred()) { + return NULL; + } + return PyLong_FromLong(result); +} + +/* Test PyUnicode_RichCompare() */ +static PyObject * +unicode_richcompare(PyObject *self, PyObject *args) +{ + PyObject *left; + PyObject *right; + int op; + + if (!PyArg_ParseTuple(args, "OOi", &left, &right, &op)) + return NULL; + + NULLABLE(left); + NULLABLE(right); + return PyUnicode_RichCompare(left, right, op); +} + +/* Test PyUnicode_Format() */ +static PyObject * +unicode_format(PyObject *self, PyObject *args) +{ + PyObject *format; + PyObject *fargs; + + if (!PyArg_ParseTuple(args, "OO", &format, &fargs)) + return NULL; + + NULLABLE(format); + NULLABLE(fargs); + return PyUnicode_Format(format, fargs); +} + +/* Test PyUnicode_Contains() */ +static PyObject * +unicode_contains(PyObject *self, PyObject *args) +{ + PyObject *container; + PyObject *element; + int result; + + if (!PyArg_ParseTuple(args, "OO", &container, &element)) + return NULL; + + NULLABLE(container); + NULLABLE(element); + result = PyUnicode_Contains(container, element); + if (result == -1 && PyErr_Occurred()) { + return NULL; + } + return PyLong_FromLong(result); +} + +/* Test PyUnicode_IsIdentifier() */ +static PyObject * +unicode_isidentifier(PyObject *self, PyObject *arg) +{ + int result; + + NULLABLE(arg); + result = PyUnicode_IsIdentifier(arg); + if (result == -1 && PyErr_Occurred()) { + return NULL; + } + return PyLong_FromLong(result); +} + +/* Test PyUnicode_CopyCharacters() */ static PyObject * unicode_copycharacters(PyObject *self, PyObject *args) { @@ -711,13 +1053,33 @@ static PyMethodDef TestMethods[] = { test_unicode_compare_with_ascii, METH_NOARGS}, {"test_string_from_format", test_string_from_format, METH_NOARGS}, {"test_widechar", test_widechar, METH_NOARGS}, + {"unicode_fromobject", unicode_fromobject, METH_O}, {"unicode_aswidechar", unicode_aswidechar, METH_VARARGS}, {"unicode_aswidecharstring", unicode_aswidecharstring, METH_VARARGS}, {"unicode_asucs4", unicode_asucs4, METH_VARARGS}, {"unicode_asutf8", unicode_asutf8, METH_VARARGS}, {"unicode_asutf8andsize", unicode_asutf8andsize, METH_VARARGS}, + {"unicode_decodeutf8", unicode_decodeutf8, METH_VARARGS}, + 
{"unicode_decodeutf8stateful",unicode_decodeutf8stateful, METH_VARARGS}, + {"unicode_concat", unicode_concat, METH_VARARGS}, + {"unicode_splitlines", unicode_splitlines, METH_VARARGS}, + {"unicode_split", unicode_split, METH_VARARGS}, + {"unicode_rsplit", unicode_rsplit, METH_VARARGS}, + {"unicode_partition", unicode_partition, METH_VARARGS}, + {"unicode_rpartition", unicode_rpartition, METH_VARARGS}, + {"unicode_translate", unicode_translate, METH_VARARGS}, + {"unicode_join", unicode_join, METH_VARARGS}, {"unicode_count", unicode_count, METH_VARARGS}, + {"unicode_tailmatch", unicode_tailmatch, METH_VARARGS}, + {"unicode_find", unicode_find, METH_VARARGS}, {"unicode_findchar", unicode_findchar, METH_VARARGS}, + {"unicode_replace", unicode_replace, METH_VARARGS}, + {"unicode_compare", unicode_compare, METH_VARARGS}, + {"unicode_comparewithasciistring",unicode_comparewithasciistring,METH_VARARGS}, + {"unicode_richcompare", unicode_richcompare, METH_VARARGS}, + {"unicode_format", unicode_format, METH_VARARGS}, + {"unicode_contains", unicode_contains, METH_VARARGS}, + {"unicode_isidentifier", unicode_isidentifier, METH_O}, {"unicode_copycharacters", unicode_copycharacters, METH_VARARGS}, {NULL}, }; diff --git a/Modules/_testcapi/vectorcall.c b/Modules/_testcapi/vectorcall.c index e9c863a7570458..dcbc973c9fb991 100644 --- a/Modules/_testcapi/vectorcall.c +++ b/Modules/_testcapi/vectorcall.c @@ -297,8 +297,7 @@ static PyObject * func_descr_get(PyObject *func, PyObject *obj, PyObject *type) { if (obj == Py_None || obj == NULL) { - Py_INCREF(func); - return func; + return Py_NewRef(func); } return PyMethod_New(func, obj); } @@ -306,15 +305,13 @@ func_descr_get(PyObject *func, PyObject *obj, PyObject *type) static PyObject * nop_descr_get(PyObject *func, PyObject *obj, PyObject *type) { - Py_INCREF(func); - return func; + return Py_NewRef(func); } static PyObject * call_return_args(PyObject *self, PyObject *args, PyObject *kwargs) { - Py_INCREF(args); - return args; + return Py_NewRef(args); } static PyTypeObject MethodDescriptorBase_Type = { diff --git a/Modules/_testcapi/watchers.c b/Modules/_testcapi/watchers.c new file mode 100644 index 00000000000000..2e8fe1dbf78651 --- /dev/null +++ b/Modules/_testcapi/watchers.c @@ -0,0 +1,681 @@ +#include "parts.h" + +#define Py_BUILD_CORE +#include "pycore_function.h" // FUNC_MAX_WATCHERS +#include "pycore_code.h" // CODE_MAX_WATCHERS + +// Test dict watching +static PyObject *g_dict_watch_events; +static int g_dict_watchers_installed; + +static int +dict_watch_callback(PyDict_WatchEvent event, + PyObject *dict, + PyObject *key, + PyObject *new_value) +{ + PyObject *msg; + switch (event) { + case PyDict_EVENT_CLEARED: + msg = PyUnicode_FromString("clear"); + break; + case PyDict_EVENT_DEALLOCATED: + msg = PyUnicode_FromString("dealloc"); + break; + case PyDict_EVENT_CLONED: + msg = PyUnicode_FromString("clone"); + break; + case PyDict_EVENT_ADDED: + msg = PyUnicode_FromFormat("new:%S:%S", key, new_value); + break; + case PyDict_EVENT_MODIFIED: + msg = PyUnicode_FromFormat("mod:%S:%S", key, new_value); + break; + case PyDict_EVENT_DELETED: + msg = PyUnicode_FromFormat("del:%S", key); + break; + default: + msg = PyUnicode_FromString("unknown"); + } + if (msg == NULL) { + return -1; + } + assert(PyList_Check(g_dict_watch_events)); + if (PyList_Append(g_dict_watch_events, msg) < 0) { + Py_DECREF(msg); + return -1; + } + Py_DECREF(msg); + return 0; +} + +static int +dict_watch_callback_second(PyDict_WatchEvent event, + PyObject *dict, + PyObject *key, + 
PyObject *new_value) +{ + PyObject *msg = PyUnicode_FromString("second"); + if (msg == NULL) { + return -1; + } + int rc = PyList_Append(g_dict_watch_events, msg); + Py_DECREF(msg); + if (rc < 0) { + return -1; + } + return 0; +} + +static int +dict_watch_callback_error(PyDict_WatchEvent event, + PyObject *dict, + PyObject *key, + PyObject *new_value) +{ + PyErr_SetString(PyExc_RuntimeError, "boom!"); + return -1; +} + +static PyObject * +add_dict_watcher(PyObject *self, PyObject *kind) +{ + int watcher_id; + assert(PyLong_Check(kind)); + long kind_l = PyLong_AsLong(kind); + if (kind_l == 2) { + watcher_id = PyDict_AddWatcher(dict_watch_callback_second); + } + else if (kind_l == 1) { + watcher_id = PyDict_AddWatcher(dict_watch_callback_error); + } + else { + watcher_id = PyDict_AddWatcher(dict_watch_callback); + } + if (watcher_id < 0) { + return NULL; + } + if (!g_dict_watchers_installed) { + assert(!g_dict_watch_events); + if (!(g_dict_watch_events = PyList_New(0))) { + return NULL; + } + } + g_dict_watchers_installed++; + return PyLong_FromLong(watcher_id); +} + +static PyObject * +clear_dict_watcher(PyObject *self, PyObject *watcher_id) +{ + if (PyDict_ClearWatcher(PyLong_AsLong(watcher_id))) { + return NULL; + } + g_dict_watchers_installed--; + if (!g_dict_watchers_installed) { + assert(g_dict_watch_events); + Py_CLEAR(g_dict_watch_events); + } + Py_RETURN_NONE; +} + +static PyObject * +watch_dict(PyObject *self, PyObject *args) +{ + PyObject *dict; + int watcher_id; + if (!PyArg_ParseTuple(args, "iO", &watcher_id, &dict)) { + return NULL; + } + if (PyDict_Watch(watcher_id, dict)) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +unwatch_dict(PyObject *self, PyObject *args) +{ + PyObject *dict; + int watcher_id; + if (!PyArg_ParseTuple(args, "iO", &watcher_id, &dict)) { + return NULL; + } + if (PyDict_Unwatch(watcher_id, dict)) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +get_dict_watcher_events(PyObject *self, PyObject *Py_UNUSED(args)) +{ + if (!g_dict_watch_events) { + PyErr_SetString(PyExc_RuntimeError, "no watchers active"); + return NULL; + } + return Py_NewRef(g_dict_watch_events); +} + +// Test type watchers +static PyObject *g_type_modified_events; +static int g_type_watchers_installed; + +static int +type_modified_callback(PyTypeObject *type) +{ + assert(PyList_Check(g_type_modified_events)); + if(PyList_Append(g_type_modified_events, (PyObject *)type) < 0) { + return -1; + } + return 0; +} + +static int +type_modified_callback_wrap(PyTypeObject *type) +{ + assert(PyList_Check(g_type_modified_events)); + PyObject *list = PyList_New(0); + if (list == NULL) { + return -1; + } + if (PyList_Append(list, (PyObject *)type) < 0) { + Py_DECREF(list); + return -1; + } + if (PyList_Append(g_type_modified_events, list) < 0) { + Py_DECREF(list); + return -1; + } + Py_DECREF(list); + return 0; +} + +static int +type_modified_callback_error(PyTypeObject *type) +{ + PyErr_SetString(PyExc_RuntimeError, "boom!"); + return -1; +} + +static PyObject * +add_type_watcher(PyObject *self, PyObject *kind) +{ + int watcher_id; + assert(PyLong_Check(kind)); + long kind_l = PyLong_AsLong(kind); + if (kind_l == 2) { + watcher_id = PyType_AddWatcher(type_modified_callback_wrap); + } + else if (kind_l == 1) { + watcher_id = PyType_AddWatcher(type_modified_callback_error); + } + else { + watcher_id = PyType_AddWatcher(type_modified_callback); + } + if (watcher_id < 0) { + return NULL; + } + if (!g_type_watchers_installed) { + assert(!g_type_modified_events); + if 
(!(g_type_modified_events = PyList_New(0))) { + return NULL; + } + } + g_type_watchers_installed++; + return PyLong_FromLong(watcher_id); +} + +static PyObject * +clear_type_watcher(PyObject *self, PyObject *watcher_id) +{ + if (PyType_ClearWatcher(PyLong_AsLong(watcher_id))) { + return NULL; + } + g_type_watchers_installed--; + if (!g_type_watchers_installed) { + assert(g_type_modified_events); + Py_CLEAR(g_type_modified_events); + } + Py_RETURN_NONE; +} + +static PyObject * +get_type_modified_events(PyObject *self, PyObject *Py_UNUSED(args)) +{ + if (!g_type_modified_events) { + PyErr_SetString(PyExc_RuntimeError, "no watchers active"); + return NULL; + } + return Py_NewRef(g_type_modified_events); +} + +static PyObject * +watch_type(PyObject *self, PyObject *args) +{ + PyObject *type; + int watcher_id; + if (!PyArg_ParseTuple(args, "iO", &watcher_id, &type)) { + return NULL; + } + if (PyType_Watch(watcher_id, type)) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +unwatch_type(PyObject *self, PyObject *args) +{ + PyObject *type; + int watcher_id; + if (!PyArg_ParseTuple(args, "iO", &watcher_id, &type)) { + return NULL; + } + if (PyType_Unwatch(watcher_id, type)) { + return NULL; + } + Py_RETURN_NONE; +} + + +// Test code object watching + +#define NUM_CODE_WATCHERS 2 +static int num_code_object_created_events[NUM_CODE_WATCHERS] = {0, 0}; +static int num_code_object_destroyed_events[NUM_CODE_WATCHERS] = {0, 0}; + +static int +handle_code_object_event(int which_watcher, PyCodeEvent event, PyCodeObject *co) { + if (event == PY_CODE_EVENT_CREATE) { + num_code_object_created_events[which_watcher]++; + } + else if (event == PY_CODE_EVENT_DESTROY) { + num_code_object_destroyed_events[which_watcher]++; + } + else { + return -1; + } + return 0; +} + +static int +first_code_object_callback(PyCodeEvent event, PyCodeObject *co) +{ + return handle_code_object_event(0, event, co); +} + +static int +second_code_object_callback(PyCodeEvent event, PyCodeObject *co) +{ + return handle_code_object_event(1, event, co); +} + +static int +noop_code_event_handler(PyCodeEvent event, PyCodeObject *co) +{ + return 0; +} + +static PyObject * +add_code_watcher(PyObject *self, PyObject *which_watcher) +{ + int watcher_id; + assert(PyLong_Check(which_watcher)); + long which_l = PyLong_AsLong(which_watcher); + if (which_l == 0) { + watcher_id = PyCode_AddWatcher(first_code_object_callback); + num_code_object_created_events[0] = 0; + num_code_object_destroyed_events[0] = 0; + } + else if (which_l == 1) { + watcher_id = PyCode_AddWatcher(second_code_object_callback); + num_code_object_created_events[1] = 0; + num_code_object_destroyed_events[1] = 0; + } + else { + return NULL; + } + if (watcher_id < 0) { + return NULL; + } + return PyLong_FromLong(watcher_id); +} + +static PyObject * +clear_code_watcher(PyObject *self, PyObject *watcher_id) +{ + assert(PyLong_Check(watcher_id)); + long watcher_id_l = PyLong_AsLong(watcher_id); + if (PyCode_ClearWatcher(watcher_id_l) < 0) { + return NULL; + } + // reset static events counters + if (watcher_id_l >= 0 && watcher_id_l < NUM_CODE_WATCHERS) { + num_code_object_created_events[watcher_id_l] = 0; + num_code_object_destroyed_events[watcher_id_l] = 0; + } + Py_RETURN_NONE; +} + +static PyObject * +get_code_watcher_num_created_events(PyObject *self, PyObject *watcher_id) +{ + assert(PyLong_Check(watcher_id)); + long watcher_id_l = PyLong_AsLong(watcher_id); + assert(watcher_id_l >= 0 && watcher_id_l < NUM_CODE_WATCHERS); + return 
PyLong_FromLong(num_code_object_created_events[watcher_id_l]); +} + +static PyObject * +get_code_watcher_num_destroyed_events(PyObject *self, PyObject *watcher_id) +{ + assert(PyLong_Check(watcher_id)); + long watcher_id_l = PyLong_AsLong(watcher_id); + assert(watcher_id_l >= 0 && watcher_id_l < NUM_CODE_WATCHERS); + return PyLong_FromLong(num_code_object_destroyed_events[watcher_id_l]); +} + +static PyObject * +allocate_too_many_code_watchers(PyObject *self, PyObject *args) +{ + int watcher_ids[CODE_MAX_WATCHERS + 1]; + int num_watchers = 0; + for (unsigned long i = 0; i < sizeof(watcher_ids) / sizeof(int); i++) { + int watcher_id = PyCode_AddWatcher(noop_code_event_handler); + if (watcher_id == -1) { + break; + } + watcher_ids[i] = watcher_id; + num_watchers++; + } + PyObject *type, *value, *traceback; + PyErr_Fetch(&type, &value, &traceback); + for (int i = 0; i < num_watchers; i++) { + if (PyCode_ClearWatcher(watcher_ids[i]) < 0) { + PyErr_WriteUnraisable(Py_None); + break; + } + } + if (type) { + PyErr_Restore(type, value, traceback); + return NULL; + } + else if (PyErr_Occurred()) { + return NULL; + } + Py_RETURN_NONE; +} + +// Test function watchers + +#define NUM_FUNC_WATCHERS 2 +static PyObject *pyfunc_watchers[NUM_FUNC_WATCHERS]; +static int func_watcher_ids[NUM_FUNC_WATCHERS] = {-1, -1}; + +static PyObject * +get_id(PyObject *obj) +{ + PyObject *builtins = PyEval_GetBuiltins(); // borrowed ref. + if (builtins == NULL) { + return NULL; + } + PyObject *id_str = PyUnicode_FromString("id"); + if (id_str == NULL) { + return NULL; + } + PyObject *id_func = PyObject_GetItem(builtins, id_str); + Py_DECREF(id_str); + if (id_func == NULL) { + return NULL; + } + PyObject *stack[] = {obj}; + PyObject *id = PyObject_Vectorcall(id_func, stack, 1, NULL); + Py_DECREF(id_func); + return id; +} + +static int +call_pyfunc_watcher(PyObject *watcher, PyFunction_WatchEvent event, + PyFunctionObject *func, PyObject *new_value) +{ + PyObject *event_obj = PyLong_FromLong(event); + if (event_obj == NULL) { + return -1; + } + if (new_value == NULL) { + new_value = Py_None; + } + Py_INCREF(new_value); + PyObject *func_or_id = NULL; + if (event == PyFunction_EVENT_DESTROY) { + /* Don't expose a function that's about to be destroyed to managed code */ + func_or_id = get_id((PyObject *) func); + if (func_or_id == NULL) { + Py_DECREF(event_obj); + Py_DECREF(new_value); + return -1; + } + } + else { + Py_INCREF(func); + func_or_id = (PyObject *) func; + } + PyObject *stack[] = {event_obj, func_or_id, new_value}; + PyObject *res = PyObject_Vectorcall(watcher, stack, 3, NULL); + int st = (res == NULL) ? 
-1 : 0; + Py_XDECREF(res); + Py_DECREF(new_value); + Py_DECREF(event_obj); + Py_DECREF(func_or_id); + return st; +} + +static int +first_func_watcher_callback(PyFunction_WatchEvent event, PyFunctionObject *func, + PyObject *new_value) +{ + return call_pyfunc_watcher(pyfunc_watchers[0], event, func, new_value); +} + +static int +second_func_watcher_callback(PyFunction_WatchEvent event, + PyFunctionObject *func, PyObject *new_value) +{ + return call_pyfunc_watcher(pyfunc_watchers[1], event, func, new_value); +} + +static PyFunction_WatchCallback func_watcher_callbacks[NUM_FUNC_WATCHERS] = { + first_func_watcher_callback, + second_func_watcher_callback +}; + +static int +add_func_event(PyObject *module, const char *name, PyFunction_WatchEvent event) +{ + PyObject *value = PyLong_FromLong(event); + if (value == NULL) { + return -1; + } + int ok = PyModule_AddObjectRef(module, name, value); + Py_DECREF(value); + return ok; +} + +static PyObject * +add_func_watcher(PyObject *self, PyObject *func) +{ + if (!PyFunction_Check(func)) { + PyErr_SetString(PyExc_TypeError, "'func' must be a function"); + return NULL; + } + int idx = -1; + for (int i = 0; i < NUM_FUNC_WATCHERS; i++) { + if (func_watcher_ids[i] == -1) { + idx = i; + break; + } + } + if (idx == -1) { + PyErr_SetString(PyExc_RuntimeError, "no free watchers"); + return NULL; + } + PyObject *result = PyLong_FromLong(idx); + if (result == NULL) { + return NULL; + } + func_watcher_ids[idx] = PyFunction_AddWatcher(func_watcher_callbacks[idx]); + if (func_watcher_ids[idx] < 0) { + Py_DECREF(result); + return NULL; + } + pyfunc_watchers[idx] = Py_NewRef(func); + return result; +} + +static PyObject * +clear_func_watcher(PyObject *self, PyObject *watcher_id_obj) +{ + long watcher_id = PyLong_AsLong(watcher_id_obj); + if ((watcher_id < INT_MIN) || (watcher_id > INT_MAX)) { + PyErr_SetString(PyExc_ValueError, "invalid watcher ID"); + return NULL; + } + int wid = (int) watcher_id; + if (PyFunction_ClearWatcher(wid) < 0) { + return NULL; + } + int idx = -1; + for (int i = 0; i < NUM_FUNC_WATCHERS; i++) { + if (func_watcher_ids[i] == wid) { + idx = i; + break; + } + } + assert(idx != -1); + Py_CLEAR(pyfunc_watchers[idx]); + func_watcher_ids[idx] = -1; + Py_RETURN_NONE; +} + +static int +noop_func_event_handler(PyFunction_WatchEvent event, PyFunctionObject *func, + PyObject *new_value) +{ + return 0; +} + +static PyObject * +allocate_too_many_func_watchers(PyObject *self, PyObject *args) +{ + int watcher_ids[FUNC_MAX_WATCHERS + 1]; + int num_watchers = 0; + for (unsigned long i = 0; i < sizeof(watcher_ids) / sizeof(int); i++) { + int watcher_id = PyFunction_AddWatcher(noop_func_event_handler); + if (watcher_id == -1) { + break; + } + watcher_ids[i] = watcher_id; + num_watchers++; + } + PyObject *type, *value, *traceback; + PyErr_Fetch(&type, &value, &traceback); + for (int i = 0; i < num_watchers; i++) { + if (PyFunction_ClearWatcher(watcher_ids[i]) < 0) { + PyErr_WriteUnraisable(Py_None); + break; + } + } + if (type) { + PyErr_Restore(type, value, traceback); + return NULL; + } + else if (PyErr_Occurred()) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +set_func_defaults(PyObject *self, PyObject *args) +{ + PyObject *func = NULL; + PyObject *defaults = NULL; + if (!PyArg_ParseTuple(args, "OO", &func, &defaults)) { + return NULL; + } + if (PyFunction_SetDefaults(func, defaults) < 0) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +set_func_kwdefaults(PyObject *self, PyObject *args) +{ + PyObject *func = NULL; + PyObject 
*kwdefaults = NULL; + if (!PyArg_ParseTuple(args, "OO", &func, &kwdefaults)) { + return NULL; + } + if (PyFunction_SetKwDefaults(func, kwdefaults) < 0) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyMethodDef test_methods[] = { + // Dict watchers. + {"add_dict_watcher", add_dict_watcher, METH_O, NULL}, + {"clear_dict_watcher", clear_dict_watcher, METH_O, NULL}, + {"watch_dict", watch_dict, METH_VARARGS, NULL}, + {"unwatch_dict", unwatch_dict, METH_VARARGS, NULL}, + {"get_dict_watcher_events", + (PyCFunction) get_dict_watcher_events, METH_NOARGS, NULL}, + + // Type watchers. + {"add_type_watcher", add_type_watcher, METH_O, NULL}, + {"clear_type_watcher", clear_type_watcher, METH_O, NULL}, + {"watch_type", watch_type, METH_VARARGS, NULL}, + {"unwatch_type", unwatch_type, METH_VARARGS, NULL}, + {"get_type_modified_events", + (PyCFunction) get_type_modified_events, METH_NOARGS, NULL}, + + // Code object watchers. + {"add_code_watcher", add_code_watcher, METH_O, NULL}, + {"clear_code_watcher", clear_code_watcher, METH_O, NULL}, + {"get_code_watcher_num_created_events", + get_code_watcher_num_created_events, METH_O, NULL}, + {"get_code_watcher_num_destroyed_events", + get_code_watcher_num_destroyed_events, METH_O, NULL}, + {"allocate_too_many_code_watchers", + (PyCFunction) allocate_too_many_code_watchers, METH_NOARGS, NULL}, + + // Function watchers. + {"add_func_watcher", add_func_watcher, METH_O, NULL}, + {"clear_func_watcher", clear_func_watcher, METH_O, NULL}, + {"set_func_defaults_via_capi", set_func_defaults, METH_VARARGS, NULL}, + {"set_func_kwdefaults_via_capi", set_func_kwdefaults, METH_VARARGS, NULL}, + {"allocate_too_many_func_watchers", allocate_too_many_func_watchers, + METH_NOARGS, NULL}, + {NULL}, +}; + +int +_PyTestCapi_Init_Watchers(PyObject *mod) +{ + if (PyModule_AddFunctions(mod, test_methods) < 0) { + return -1; + } + + /* Expose each event as an attribute on the module */ +#define ADD_EVENT(event) \ + if (add_func_event(mod, "PYFUNC_EVENT_" #event, \ + PyFunction_EVENT_##event)) { \ + return -1; \ + } + FOREACH_FUNC_EVENT(ADD_EVENT); +#undef ADD_EVENT + + return 0; +} diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index 19ceb108ed4e36..83eef73a875d9d 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -13,7 +13,6 @@ #undef Py_BUILD_CORE_MODULE #undef Py_BUILD_CORE_BUILTIN -#define NEEDS_PY_IDENTIFIER /* Always enable assertions */ #undef NDEBUG @@ -21,16 +20,11 @@ #define PY_SSIZE_T_CLEAN #include "Python.h" -#include "datetime.h" // PyDateTimeAPI #include "marshal.h" // PyMarshal_WriteLongToFile -#include "structmember.h" // PyMemberDef +#include "structmember.h" // for offsetof(), T_OBJECT #include <float.h> // FLT_MAX #include <signal.h> -#ifdef MS_WINDOWS -# include <winsock2.h> // struct timeval -#endif - #ifdef HAVE_SYS_WAIT_H #include <sys/wait.h> // W_STOPCODE #endif @@ -343,8 +337,7 @@ dict_getitem_knownhash(PyObject *self, PyObject *args) return NULL; } - Py_XINCREF(result); - return result; + return Py_XNewRef(result); } /* Issue #4701: Check that PyObject_Hash implicitly calls @@ -467,537 +460,6 @@ test_lazy_hash_inheritance(PyObject* self, PyObject *Py_UNUSED(ignored)) Py_RETURN_NONE; } - -/* Tests of PyLong_{As, From}{Unsigned,}Long(), and - PyLong_{As, From}{Unsigned,}LongLong(). - - Note that the meat of the test is contained in testcapi_long.h. 
- This is revolting, but delicate code duplication is worse: "almost - exactly the same" code is needed to test long long, but the ubiquitous - dependence on type names makes it impossible to use a parameterized - function. A giant macro would be even worse than this. A C++ template - would be perfect. - - The "report an error" functions are deliberately not part of the #include - file: if the test fails, you can set a breakpoint in the appropriate - error function directly, and crawl back from there in the debugger. -*/ - -#define UNBIND(X) Py_DECREF(X); (X) = NULL - -static PyObject * -raise_test_long_error(const char* msg) -{ - return raiseTestError("test_long_api", msg); -} - -#define TESTNAME test_long_api_inner -#define TYPENAME long -#define F_S_TO_PY PyLong_FromLong -#define F_PY_TO_S PyLong_AsLong -#define F_U_TO_PY PyLong_FromUnsignedLong -#define F_PY_TO_U PyLong_AsUnsignedLong - -#include "testcapi_long.h" - -static PyObject * -test_long_api(PyObject* self, PyObject *Py_UNUSED(ignored)) -{ - return TESTNAME(raise_test_long_error); -} - -#undef TESTNAME -#undef TYPENAME -#undef F_S_TO_PY -#undef F_PY_TO_S -#undef F_U_TO_PY -#undef F_PY_TO_U - -static PyObject * -raise_test_longlong_error(const char* msg) -{ - return raiseTestError("test_longlong_api", msg); -} - -#define TESTNAME test_longlong_api_inner -#define TYPENAME long long -#define F_S_TO_PY PyLong_FromLongLong -#define F_PY_TO_S PyLong_AsLongLong -#define F_U_TO_PY PyLong_FromUnsignedLongLong -#define F_PY_TO_U PyLong_AsUnsignedLongLong - -#include "testcapi_long.h" - -static PyObject * -test_longlong_api(PyObject* self, PyObject *args) -{ - return TESTNAME(raise_test_longlong_error); -} - -#undef TESTNAME -#undef TYPENAME -#undef F_S_TO_PY -#undef F_PY_TO_S -#undef F_U_TO_PY -#undef F_PY_TO_U - -/* Test the PyLong_AsLongAndOverflow API. General conversion to PY_LONG - is tested by test_long_api_inner. This test will concentrate on proper - handling of overflow. -*/ - -static PyObject * -test_long_and_overflow(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - PyObject *num, *one, *temp; - long value; - int overflow; - - /* Test that overflow is set properly for a large value. */ - /* num is a number larger than LONG_MAX even on 64-bit platforms */ - num = PyLong_FromString("FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_and_overflow", - "return value was not set to -1"); - if (overflow != 1) - return raiseTestError("test_long_and_overflow", - "overflow was not set to 1"); - - /* Same again, with num = LONG_MAX + 1 */ - num = PyLong_FromLong(LONG_MAX); - if (num == NULL) - return NULL; - one = PyLong_FromLong(1L); - if (one == NULL) { - Py_DECREF(num); - return NULL; - } - temp = PyNumber_Add(num, one); - Py_DECREF(one); - Py_DECREF(num); - num = temp; - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_and_overflow", - "return value was not set to -1"); - if (overflow != 1) - return raiseTestError("test_long_and_overflow", - "overflow was not set to 1"); - - /* Test that overflow is set properly for a large negative value. 
*/ - /* num is a number smaller than LONG_MIN even on 64-bit platforms */ - num = PyLong_FromString("-FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_and_overflow", - "return value was not set to -1"); - if (overflow != -1) - return raiseTestError("test_long_and_overflow", - "overflow was not set to -1"); - - /* Same again, with num = LONG_MIN - 1 */ - num = PyLong_FromLong(LONG_MIN); - if (num == NULL) - return NULL; - one = PyLong_FromLong(1L); - if (one == NULL) { - Py_DECREF(num); - return NULL; - } - temp = PyNumber_Subtract(num, one); - Py_DECREF(one); - Py_DECREF(num); - num = temp; - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_and_overflow", - "return value was not set to -1"); - if (overflow != -1) - return raiseTestError("test_long_and_overflow", - "overflow was not set to -1"); - - /* Test that overflow is cleared properly for small values. */ - num = PyLong_FromString("FF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != 0xFF) - return raiseTestError("test_long_and_overflow", - "expected return value 0xFF"); - if (overflow != 0) - return raiseTestError("test_long_and_overflow", - "overflow was not cleared"); - - num = PyLong_FromString("-FF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -0xFF) - return raiseTestError("test_long_and_overflow", - "expected return value 0xFF"); - if (overflow != 0) - return raiseTestError("test_long_and_overflow", - "overflow was set incorrectly"); - - num = PyLong_FromLong(LONG_MAX); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != LONG_MAX) - return raiseTestError("test_long_and_overflow", - "expected return value LONG_MAX"); - if (overflow != 0) - return raiseTestError("test_long_and_overflow", - "overflow was not cleared"); - - num = PyLong_FromLong(LONG_MIN); - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != LONG_MIN) - return raiseTestError("test_long_and_overflow", - "expected return value LONG_MIN"); - if (overflow != 0) - return raiseTestError("test_long_and_overflow", - "overflow was not cleared"); - - Py_RETURN_NONE; -} - -/* Test the PyLong_AsLongLongAndOverflow API. General conversion to - long long is tested by test_long_api_inner. This test will - concentrate on proper handling of overflow. -*/ - -static PyObject * -test_long_long_and_overflow(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - PyObject *num, *one, *temp; - long long value; - int overflow; - - /* Test that overflow is set properly for a large value. */ - /* num is a number larger than LLONG_MAX on a typical machine. 
*/ - num = PyLong_FromString("FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_long_and_overflow", - "return value was not set to -1"); - if (overflow != 1) - return raiseTestError("test_long_long_and_overflow", - "overflow was not set to 1"); - - /* Same again, with num = LLONG_MAX + 1 */ - num = PyLong_FromLongLong(LLONG_MAX); - if (num == NULL) - return NULL; - one = PyLong_FromLong(1L); - if (one == NULL) { - Py_DECREF(num); - return NULL; - } - temp = PyNumber_Add(num, one); - Py_DECREF(one); - Py_DECREF(num); - num = temp; - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_long_and_overflow", - "return value was not set to -1"); - if (overflow != 1) - return raiseTestError("test_long_long_and_overflow", - "overflow was not set to 1"); - - /* Test that overflow is set properly for a large negative value. */ - /* num is a number smaller than LLONG_MIN on a typical platform */ - num = PyLong_FromString("-FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_long_and_overflow", - "return value was not set to -1"); - if (overflow != -1) - return raiseTestError("test_long_long_and_overflow", - "overflow was not set to -1"); - - /* Same again, with num = LLONG_MIN - 1 */ - num = PyLong_FromLongLong(LLONG_MIN); - if (num == NULL) - return NULL; - one = PyLong_FromLong(1L); - if (one == NULL) { - Py_DECREF(num); - return NULL; - } - temp = PyNumber_Subtract(num, one); - Py_DECREF(one); - Py_DECREF(num); - num = temp; - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_long_and_overflow", - "return value was not set to -1"); - if (overflow != -1) - return raiseTestError("test_long_long_and_overflow", - "overflow was not set to -1"); - - /* Test that overflow is cleared properly for small values. 
*/ - num = PyLong_FromString("FF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != 0xFF) - return raiseTestError("test_long_long_and_overflow", - "expected return value 0xFF"); - if (overflow != 0) - return raiseTestError("test_long_long_and_overflow", - "overflow was not cleared"); - - num = PyLong_FromString("-FF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -0xFF) - return raiseTestError("test_long_long_and_overflow", - "expected return value 0xFF"); - if (overflow != 0) - return raiseTestError("test_long_long_and_overflow", - "overflow was set incorrectly"); - - num = PyLong_FromLongLong(LLONG_MAX); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != LLONG_MAX) - return raiseTestError("test_long_long_and_overflow", - "expected return value LLONG_MAX"); - if (overflow != 0) - return raiseTestError("test_long_long_and_overflow", - "overflow was not cleared"); - - num = PyLong_FromLongLong(LLONG_MIN); - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != LLONG_MIN) - return raiseTestError("test_long_long_and_overflow", - "expected return value LLONG_MIN"); - if (overflow != 0) - return raiseTestError("test_long_long_and_overflow", - "overflow was not cleared"); - - Py_RETURN_NONE; -} - -/* Test the PyLong_As{Size,Ssize}_t API. At present this just tests that - non-integer arguments are handled correctly. It should be extended to - test overflow handling. - */ - -static PyObject * -test_long_as_size_t(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - size_t out_u; - Py_ssize_t out_s; - - Py_INCREF(Py_None); - - out_u = PyLong_AsSize_t(Py_None); - if (out_u != (size_t)-1 || !PyErr_Occurred()) - return raiseTestError("test_long_as_size_t", - "PyLong_AsSize_t(None) didn't complain"); - if (!PyErr_ExceptionMatches(PyExc_TypeError)) - return raiseTestError("test_long_as_size_t", - "PyLong_AsSize_t(None) raised " - "something other than TypeError"); - PyErr_Clear(); - - out_s = PyLong_AsSsize_t(Py_None); - if (out_s != (Py_ssize_t)-1 || !PyErr_Occurred()) - return raiseTestError("test_long_as_size_t", - "PyLong_AsSsize_t(None) didn't complain"); - if (!PyErr_ExceptionMatches(PyExc_TypeError)) - return raiseTestError("test_long_as_size_t", - "PyLong_AsSsize_t(None) raised " - "something other than TypeError"); - PyErr_Clear(); - - /* Py_INCREF(Py_None) omitted - we already have a reference to it. 
*/ - return Py_None; -} - -static PyObject * -test_long_as_unsigned_long_long_mask(PyObject *self, - PyObject *Py_UNUSED(ignored)) -{ - unsigned long long res = PyLong_AsUnsignedLongLongMask(NULL); - - if (res != (unsigned long long)-1 || !PyErr_Occurred()) { - return raiseTestError("test_long_as_unsigned_long_long_mask", - "PyLong_AsUnsignedLongLongMask(NULL) didn't " - "complain"); - } - if (!PyErr_ExceptionMatches(PyExc_SystemError)) { - return raiseTestError("test_long_as_unsigned_long_long_mask", - "PyLong_AsUnsignedLongLongMask(NULL) raised " - "something other than SystemError"); - } - PyErr_Clear(); - Py_RETURN_NONE; -} - -/* Test the PyLong_AsDouble API. At present this just tests that - non-integer arguments are handled correctly. - */ - -static PyObject * -test_long_as_double(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - double out; - - Py_INCREF(Py_None); - - out = PyLong_AsDouble(Py_None); - if (out != -1.0 || !PyErr_Occurred()) - return raiseTestError("test_long_as_double", - "PyLong_AsDouble(None) didn't complain"); - if (!PyErr_ExceptionMatches(PyExc_TypeError)) - return raiseTestError("test_long_as_double", - "PyLong_AsDouble(None) raised " - "something other than TypeError"); - PyErr_Clear(); - - /* Py_INCREF(Py_None) omitted - we already have a reference to it. */ - return Py_None; -} - -/* Test the L code for PyArg_ParseTuple. This should deliver a long long - for both long and int arguments. The test may leak a little memory if - it fails. -*/ -static PyObject * -test_L_code(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - PyObject *tuple, *num; - long long value; - - tuple = PyTuple_New(1); - if (tuple == NULL) - return NULL; - - num = PyLong_FromLong(42); - if (num == NULL) - return NULL; - - PyTuple_SET_ITEM(tuple, 0, num); - - value = -1; - if (!PyArg_ParseTuple(tuple, "L:test_L_code", &value)) { - return NULL; - } - if (value != 42) - return raiseTestError("test_L_code", - "L code returned wrong value for long 42"); - - Py_DECREF(num); - num = PyLong_FromLong(42); - if (num == NULL) - return NULL; - - PyTuple_SET_ITEM(tuple, 0, num); - - value = -1; - if (!PyArg_ParseTuple(tuple, "L:test_L_code", &value)) { - return NULL; - } - if (value != 42) - return raiseTestError("test_L_code", - "L code returned wrong value for int 42"); - - Py_DECREF(tuple); - Py_RETURN_NONE; -} - static PyObject * return_none(void *unused) { @@ -1236,1312 +698,158 @@ test_get_type_qualname(PyObject *self, PyObject *Py_UNUSED(ignored)) Py_RETURN_NONE; } - -static PyObject * -get_args(PyObject *self, PyObject *args) -{ - if (args == NULL) { - args = Py_None; - } - Py_INCREF(args); - return args; -} - static PyObject * -get_kwargs(PyObject *self, PyObject *args, PyObject *kwargs) +pyobject_repr_from_null(PyObject *self, PyObject *Py_UNUSED(ignored)) { - if (kwargs == NULL) { - kwargs = Py_None; - } - Py_INCREF(kwargs); - return kwargs; + return PyObject_Repr(NULL); } -/* Test tuple argument processing */ static PyObject * -getargs_tuple(PyObject *self, PyObject *args) +pyobject_str_from_null(PyObject *self, PyObject *Py_UNUSED(ignored)) { - int a, b, c; - if (!PyArg_ParseTuple(args, "i(ii)", &a, &b, &c)) - return NULL; - return Py_BuildValue("iii", a, b, c); + return PyObject_Str(NULL); } -/* test PyArg_ParseTupleAndKeywords */ static PyObject * -getargs_keywords(PyObject *self, PyObject *args, PyObject *kwargs) +pyobject_bytes_from_null(PyObject *self, PyObject *Py_UNUSED(ignored)) { - static char *keywords[] = {"arg1","arg2","arg3","arg4","arg5", NULL}; - static const char fmt[] = 
"(ii)i|(i(ii))(iii)i"; - int int_args[10]={-1, -1, -1, -1, -1, -1, -1, -1, -1, -1}; - - if (!PyArg_ParseTupleAndKeywords(args, kwargs, fmt, keywords, - &int_args[0], &int_args[1], &int_args[2], &int_args[3], &int_args[4], - &int_args[5], &int_args[6], &int_args[7], &int_args[8], &int_args[9])) - return NULL; - return Py_BuildValue("iiiiiiiiii", - int_args[0], int_args[1], int_args[2], int_args[3], int_args[4], - int_args[5], int_args[6], int_args[7], int_args[8], int_args[9]); + return PyObject_Bytes(NULL); } -/* test PyArg_ParseTupleAndKeywords keyword-only arguments */ static PyObject * -getargs_keyword_only(PyObject *self, PyObject *args, PyObject *kwargs) +raise_exception(PyObject *self, PyObject *args) { - static char *keywords[] = {"required", "optional", "keyword_only", NULL}; - int required = -1; - int optional = -1; - int keyword_only = -1; + PyObject *exc; + PyObject *exc_args, *v; + int num_args, i; - if (!PyArg_ParseTupleAndKeywords(args, kwargs, "i|i$i", keywords, - &required, &optional, &keyword_only)) + if (!PyArg_ParseTuple(args, "Oi:raise_exception", + &exc, &num_args)) return NULL; - return Py_BuildValue("iii", required, optional, keyword_only); -} - -/* test PyArg_ParseTupleAndKeywords positional-only arguments */ -static PyObject * -getargs_positional_only_and_keywords(PyObject *self, PyObject *args, PyObject *kwargs) -{ - static char *keywords[] = {"", "", "keyword", NULL}; - int required = -1; - int optional = -1; - int keyword = -1; - if (!PyArg_ParseTupleAndKeywords(args, kwargs, "i|ii", keywords, - &required, &optional, &keyword)) + exc_args = PyTuple_New(num_args); + if (exc_args == NULL) return NULL; - return Py_BuildValue("iii", required, optional, keyword); + for (i = 0; i < num_args; ++i) { + v = PyLong_FromLong(i); + if (v == NULL) { + Py_DECREF(exc_args); + return NULL; + } + PyTuple_SET_ITEM(exc_args, i, v); + } + PyErr_SetObject(exc, exc_args); + Py_DECREF(exc_args); + return NULL; } -/* Functions to call PyArg_ParseTuple with integer format codes, - and return the result. 
-*/ static PyObject * -getargs_b(PyObject *self, PyObject *args) +set_errno(PyObject *self, PyObject *args) { - unsigned char value; - if (!PyArg_ParseTuple(args, "b", &value)) - return NULL; - return PyLong_FromUnsignedLong((unsigned long)value); -} + int new_errno; -static PyObject * -getargs_B(PyObject *self, PyObject *args) -{ - unsigned char value; - if (!PyArg_ParseTuple(args, "B", &value)) + if (!PyArg_ParseTuple(args, "i:set_errno", &new_errno)) return NULL; - return PyLong_FromUnsignedLong((unsigned long)value); -} -static PyObject * -getargs_h(PyObject *self, PyObject *args) -{ - short value; - if (!PyArg_ParseTuple(args, "h", &value)) - return NULL; - return PyLong_FromLong((long)value); + errno = new_errno; + Py_RETURN_NONE; } static PyObject * -getargs_H(PyObject *self, PyObject *args) +test_set_exception(PyObject *self, PyObject *new_exc) { - unsigned short value; - if (!PyArg_ParseTuple(args, "H", &value)) - return NULL; - return PyLong_FromUnsignedLong((unsigned long)value); -} + PyObject *exc = PyErr_GetHandledException(); + assert(PyExceptionInstance_Check(exc) || exc == NULL); -static PyObject * -getargs_I(PyObject *self, PyObject *args) -{ - unsigned int value; - if (!PyArg_ParseTuple(args, "I", &value)) - return NULL; - return PyLong_FromUnsignedLong((unsigned long)value); + PyErr_SetHandledException(new_exc); + return exc; } static PyObject * -getargs_k(PyObject *self, PyObject *args) +test_set_exc_info(PyObject *self, PyObject *args) { - unsigned long value; - if (!PyArg_ParseTuple(args, "k", &value)) + PyObject *orig_exc; + PyObject *new_type, *new_value, *new_tb; + PyObject *type, *value, *tb; + if (!PyArg_ParseTuple(args, "OOO:test_set_exc_info", + &new_type, &new_value, &new_tb)) return NULL; - return PyLong_FromUnsignedLong(value); -} -static PyObject * -getargs_i(PyObject *self, PyObject *args) -{ - int value; - if (!PyArg_ParseTuple(args, "i", &value)) - return NULL; - return PyLong_FromLong((long)value); -} + PyErr_GetExcInfo(&type, &value, &tb); -static PyObject * -getargs_l(PyObject *self, PyObject *args) -{ - long value; - if (!PyArg_ParseTuple(args, "l", &value)) - return NULL; - return PyLong_FromLong(value); -} + Py_INCREF(new_type); + Py_INCREF(new_value); + Py_INCREF(new_tb); + PyErr_SetExcInfo(new_type, new_value, new_tb); -static PyObject * -getargs_n(PyObject *self, PyObject *args) -{ - Py_ssize_t value; - if (!PyArg_ParseTuple(args, "n", &value)) - return NULL; - return PyLong_FromSsize_t(value); + orig_exc = PyTuple_Pack(3, type ? type : Py_None, value ? value : Py_None, tb ? tb : Py_None); + Py_XDECREF(type); + Py_XDECREF(value); + Py_XDECREF(tb); + return orig_exc; } -static PyObject * -getargs_p(PyObject *self, PyObject *args) -{ - int value; - if (!PyArg_ParseTuple(args, "p", &value)) - return NULL; - return PyLong_FromLong(value); -} +/* test_thread_state spawns a thread of its own, and that thread releases + * `thread_done` when it's finished. The driver code has to know when the + * thread finishes, because the thread uses a PyObject (the callable) that + * may go away when the driver finishes. The former lack of this explicit + * synchronization caused rare segfaults, so rare that they were seen only + * on a Mac buildbot (although they were possible on any box). 
+ */ +static PyThread_type_lock thread_done = NULL; -static PyObject * -getargs_L(PyObject *self, PyObject *args) +static int +_make_call(void *callable) { - long long value; - if (!PyArg_ParseTuple(args, "L", &value)) - return NULL; - return PyLong_FromLongLong(value); + PyObject *rc; + int success; + PyGILState_STATE s = PyGILState_Ensure(); + rc = PyObject_CallNoArgs((PyObject *)callable); + success = (rc != NULL); + Py_XDECREF(rc); + PyGILState_Release(s); + return success; } -static PyObject * -getargs_K(PyObject *self, PyObject *args) +/* Same thing, but releases `thread_done` when it returns. This variant + * should be called only from threads spawned by test_thread_state(). + */ +static void +_make_call_from_thread(void *callable) { - unsigned long long value; - if (!PyArg_ParseTuple(args, "K", &value)) - return NULL; - return PyLong_FromUnsignedLongLong(value); + _make_call(callable); + PyThread_release_lock(thread_done); } -/* This function not only tests the 'k' getargs code, but also the - PyLong_AsUnsignedLongMask() function. */ static PyObject * -test_k_code(PyObject *self, PyObject *Py_UNUSED(ignored)) +test_thread_state(PyObject *self, PyObject *args) { - PyObject *tuple, *num; - unsigned long value; - - tuple = PyTuple_New(1); - if (tuple == NULL) - return NULL; + PyObject *fn; + int success = 1; - /* a number larger than ULONG_MAX even on 64-bit platforms */ - num = PyLong_FromString("FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); - if (num == NULL) + if (!PyArg_ParseTuple(args, "O:test_thread_state", &fn)) return NULL; - value = PyLong_AsUnsignedLongMask(num); - if (value != ULONG_MAX) - return raiseTestError("test_k_code", - "PyLong_AsUnsignedLongMask() returned wrong value for long 0xFFF...FFF"); - - PyTuple_SET_ITEM(tuple, 0, num); - - value = 0; - if (!PyArg_ParseTuple(tuple, "k:test_k_code", &value)) { + if (!PyCallable_Check(fn)) { + PyErr_Format(PyExc_TypeError, "'%s' object is not callable", + Py_TYPE(fn)->tp_name); return NULL; } - if (value != ULONG_MAX) - return raiseTestError("test_k_code", - "k code returned wrong value for long 0xFFF...FFF"); - - Py_DECREF(num); - num = PyLong_FromString("-FFFFFFFF000000000000000042", NULL, 16); - if (num == NULL) - return NULL; - value = PyLong_AsUnsignedLongMask(num); - if (value != (unsigned long)-0x42) - return raiseTestError("test_k_code", - "PyLong_AsUnsignedLongMask() returned wrong " - "value for long -0xFFF..000042"); - - PyTuple_SET_ITEM(tuple, 0, num); + thread_done = PyThread_allocate_lock(); + if (thread_done == NULL) + return PyErr_NoMemory(); + PyThread_acquire_lock(thread_done, 1); - value = 0; - if (!PyArg_ParseTuple(tuple, "k:test_k_code", &value)) { - return NULL; - } - if (value != (unsigned long)-0x42) - return raiseTestError("test_k_code", - "k code returned wrong value for long -0xFFF..000042"); - - Py_DECREF(tuple); - Py_RETURN_NONE; -} - -static PyObject * -getargs_f(PyObject *self, PyObject *args) -{ - float f; - if (!PyArg_ParseTuple(args, "f", &f)) - return NULL; - return PyFloat_FromDouble(f); -} - -static PyObject * -getargs_d(PyObject *self, PyObject *args) -{ - double d; - if (!PyArg_ParseTuple(args, "d", &d)) - return NULL; - return PyFloat_FromDouble(d); -} - -static PyObject * -getargs_D(PyObject *self, PyObject *args) -{ - Py_complex cval; - if (!PyArg_ParseTuple(args, "D", &cval)) - return NULL; - return PyComplex_FromCComplex(cval); -} - -static PyObject * -getargs_S(PyObject *self, PyObject *args) -{ - PyObject *obj; - if (!PyArg_ParseTuple(args, "S", &obj)) - return NULL; - Py_INCREF(obj); - 
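/*
 * A minimal standalone sketch of the synchronization pattern described in the
 * test_thread_state comment above: a thread that was not created by Python
 * must enter the interpreter through PyGILState_Ensure() before touching any
 * PyObject, and the driver keeps the callable alive until the worker signals
 * completion through a lock.  The names `worker`, `worker_done` and
 * `call_from_foreign_thread` are illustrative only, not part of this patch.
 */
#include <Python.h>
#include "pythread.h"

static PyThread_type_lock worker_done;       /* released by the worker thread */

static void
worker(void *arg)
{
    PyObject *callable = (PyObject *)arg;

    /* This thread owns no Python thread state yet; PyGILState_Ensure()
       creates one if needed and acquires the GIL. */
    PyGILState_STATE gstate = PyGILState_Ensure();
    PyObject *res = PyObject_CallNoArgs(callable);
    if (res == NULL) {
        PyErr_WriteUnraisable(callable);     /* cannot propagate from here */
    }
    Py_XDECREF(res);
    PyGILState_Release(gstate);

    /* Tell the driver it is now safe to drop its reference to `callable`. */
    PyThread_release_lock(worker_done);
}

static int
call_from_foreign_thread(PyObject *callable)
{
    worker_done = PyThread_allocate_lock();
    if (worker_done == NULL) {
        return -1;
    }
    PyThread_acquire_lock(worker_done, 1);

    Py_INCREF(callable);                     /* keep it alive for the worker */
    /* Error handling for thread creation is elided in this sketch. */
    PyThread_start_new_thread(worker, callable);

    /* Release the GIL while waiting; otherwise the worker could never
       acquire it and the lock below would never be released. */
    Py_BEGIN_ALLOW_THREADS
    PyThread_acquire_lock(worker_done, 1);   /* wait for the worker to finish */
    Py_END_ALLOW_THREADS

    Py_DECREF(callable);
    PyThread_free_lock(worker_done);
    worker_done = NULL;
    return 0;
}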
return obj; -} - -static PyObject * -getargs_Y(PyObject *self, PyObject *args) -{ - PyObject *obj; - if (!PyArg_ParseTuple(args, "Y", &obj)) - return NULL; - Py_INCREF(obj); - return obj; -} - -static PyObject * -getargs_U(PyObject *self, PyObject *args) -{ - PyObject *obj; - if (!PyArg_ParseTuple(args, "U", &obj)) - return NULL; - Py_INCREF(obj); - return obj; -} - -static PyObject * -getargs_c(PyObject *self, PyObject *args) -{ - char c; - if (!PyArg_ParseTuple(args, "c", &c)) - return NULL; - return PyLong_FromLong((unsigned char)c); -} - -static PyObject * -getargs_C(PyObject *self, PyObject *args) -{ - int c; - if (!PyArg_ParseTuple(args, "C", &c)) - return NULL; - return PyLong_FromLong(c); -} - -static PyObject * -getargs_s(PyObject *self, PyObject *args) -{ - char *str; - if (!PyArg_ParseTuple(args, "s", &str)) - return NULL; - return PyBytes_FromString(str); -} - -static PyObject * -getargs_s_star(PyObject *self, PyObject *args) -{ - Py_buffer buffer; - PyObject *bytes; - if (!PyArg_ParseTuple(args, "s*", &buffer)) - return NULL; - bytes = PyBytes_FromStringAndSize(buffer.buf, buffer.len); - PyBuffer_Release(&buffer); - return bytes; -} - -static PyObject * -getargs_s_hash(PyObject *self, PyObject *args) -{ - char *str; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "s#", &str, &size)) - return NULL; - return PyBytes_FromStringAndSize(str, size); -} - -static PyObject * -getargs_z(PyObject *self, PyObject *args) -{ - char *str; - if (!PyArg_ParseTuple(args, "z", &str)) - return NULL; - if (str != NULL) - return PyBytes_FromString(str); - else - Py_RETURN_NONE; -} - -static PyObject * -getargs_z_star(PyObject *self, PyObject *args) -{ - Py_buffer buffer; - PyObject *bytes; - if (!PyArg_ParseTuple(args, "z*", &buffer)) - return NULL; - if (buffer.buf != NULL) - bytes = PyBytes_FromStringAndSize(buffer.buf, buffer.len); - else { - Py_INCREF(Py_None); - bytes = Py_None; - } - PyBuffer_Release(&buffer); - return bytes; -} - -static PyObject * -getargs_z_hash(PyObject *self, PyObject *args) -{ - char *str; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "z#", &str, &size)) - return NULL; - if (str != NULL) - return PyBytes_FromStringAndSize(str, size); - else - Py_RETURN_NONE; -} - -static PyObject * -getargs_y(PyObject *self, PyObject *args) -{ - char *str; - if (!PyArg_ParseTuple(args, "y", &str)) - return NULL; - return PyBytes_FromString(str); -} - -static PyObject * -getargs_y_star(PyObject *self, PyObject *args) -{ - Py_buffer buffer; - PyObject *bytes; - if (!PyArg_ParseTuple(args, "y*", &buffer)) - return NULL; - bytes = PyBytes_FromStringAndSize(buffer.buf, buffer.len); - PyBuffer_Release(&buffer); - return bytes; -} - -static PyObject * -getargs_y_hash(PyObject *self, PyObject *args) -{ - char *str; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "y#", &str, &size)) - return NULL; - return PyBytes_FromStringAndSize(str, size); -} - -static PyObject * -getargs_u(PyObject *self, PyObject *args) -{ - Py_UNICODE *str; - if (!PyArg_ParseTuple(args, "u", &str)) - return NULL; - return PyUnicode_FromWideChar(str, -1); -} - -static PyObject * -getargs_u_hash(PyObject *self, PyObject *args) -{ - Py_UNICODE *str; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "u#", &str, &size)) - return NULL; - return PyUnicode_FromWideChar(str, size); -} - -static PyObject * -getargs_Z(PyObject *self, PyObject *args) -{ - Py_UNICODE *str; - if (!PyArg_ParseTuple(args, "Z", &str)) - return NULL; - if (str != NULL) { - return PyUnicode_FromWideChar(str, -1); - } else - Py_RETURN_NONE; -} - -static 
PyObject * -getargs_Z_hash(PyObject *self, PyObject *args) -{ - Py_UNICODE *str; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "Z#", &str, &size)) - return NULL; - if (str != NULL) - return PyUnicode_FromWideChar(str, size); - else - Py_RETURN_NONE; -} - -static PyObject * -getargs_es(PyObject *self, PyObject *args) -{ - PyObject *arg, *result; - const char *encoding = NULL; - char *str; - - if (!PyArg_ParseTuple(args, "O|s", &arg, &encoding)) - return NULL; - if (!PyArg_Parse(arg, "es", encoding, &str)) - return NULL; - result = PyBytes_FromString(str); - PyMem_Free(str); - return result; -} - -static PyObject * -getargs_et(PyObject *self, PyObject *args) -{ - PyObject *arg, *result; - const char *encoding = NULL; - char *str; - - if (!PyArg_ParseTuple(args, "O|s", &arg, &encoding)) - return NULL; - if (!PyArg_Parse(arg, "et", encoding, &str)) - return NULL; - result = PyBytes_FromString(str); - PyMem_Free(str); - return result; -} - -static PyObject * -getargs_es_hash(PyObject *self, PyObject *args) -{ - PyObject *arg, *result; - const char *encoding = NULL; - PyByteArrayObject *buffer = NULL; - char *str = NULL; - Py_ssize_t size; - - if (!PyArg_ParseTuple(args, "O|sY", &arg, &encoding, &buffer)) - return NULL; - if (buffer != NULL) { - str = PyByteArray_AS_STRING(buffer); - size = PyByteArray_GET_SIZE(buffer); - } - if (!PyArg_Parse(arg, "es#", encoding, &str, &size)) - return NULL; - result = PyBytes_FromStringAndSize(str, size); - if (buffer == NULL) - PyMem_Free(str); - return result; -} - -static PyObject * -getargs_et_hash(PyObject *self, PyObject *args) -{ - PyObject *arg, *result; - const char *encoding = NULL; - PyByteArrayObject *buffer = NULL; - char *str = NULL; - Py_ssize_t size; - - if (!PyArg_ParseTuple(args, "O|sY", &arg, &encoding, &buffer)) - return NULL; - if (buffer != NULL) { - str = PyByteArray_AS_STRING(buffer); - size = PyByteArray_GET_SIZE(buffer); - } - if (!PyArg_Parse(arg, "et#", encoding, &str, &size)) - return NULL; - result = PyBytes_FromStringAndSize(str, size); - if (buffer == NULL) - PyMem_Free(str); - return result; -} - -/* Test the s and z codes for PyArg_ParseTuple. 
-*/ -static PyObject * -test_s_code(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - /* Unicode strings should be accepted */ - PyObject *tuple, *obj; - char *value; - - tuple = PyTuple_New(1); - if (tuple == NULL) - return NULL; - - obj = PyUnicode_Decode("t\xeate", strlen("t\xeate"), - "latin-1", NULL); - if (obj == NULL) - return NULL; - - PyTuple_SET_ITEM(tuple, 0, obj); - - /* These two blocks used to raise a TypeError: - * "argument must be string without null bytes, not str" - */ - if (!PyArg_ParseTuple(tuple, "s:test_s_code1", &value)) { - return NULL; - } - - if (!PyArg_ParseTuple(tuple, "z:test_s_code2", &value)) { - return NULL; - } - - Py_DECREF(tuple); - Py_RETURN_NONE; -} - -static PyObject * -parse_tuple_and_keywords(PyObject *self, PyObject *args) -{ - PyObject *sub_args; - PyObject *sub_kwargs; - const char *sub_format; - PyObject *sub_keywords; - - Py_ssize_t i, size; - char *keywords[8 + 1]; /* space for NULL at end */ - PyObject *o; - PyObject *converted[8]; - - int result; - PyObject *return_value = NULL; - - double buffers[8][4]; /* double ensures alignment where necessary */ - - if (!PyArg_ParseTuple(args, "OOsO:parse_tuple_and_keywords", - &sub_args, &sub_kwargs, - &sub_format, &sub_keywords)) - return NULL; - - if (!(PyList_CheckExact(sub_keywords) || PyTuple_CheckExact(sub_keywords))) { - PyErr_SetString(PyExc_ValueError, - "parse_tuple_and_keywords: sub_keywords must be either list or tuple"); - return NULL; - } - - memset(buffers, 0, sizeof(buffers)); - memset(converted, 0, sizeof(converted)); - memset(keywords, 0, sizeof(keywords)); - - size = PySequence_Fast_GET_SIZE(sub_keywords); - if (size > 8) { - PyErr_SetString(PyExc_ValueError, - "parse_tuple_and_keywords: too many keywords in sub_keywords"); - goto exit; - } - - for (i = 0; i < size; i++) { - o = PySequence_Fast_GET_ITEM(sub_keywords, i); - if (!PyUnicode_FSConverter(o, (void *)(converted + i))) { - PyErr_Format(PyExc_ValueError, - "parse_tuple_and_keywords: could not convert keywords[%zd] to narrow string", i); - goto exit; - } - keywords[i] = PyBytes_AS_STRING(converted[i]); - } - - result = PyArg_ParseTupleAndKeywords(sub_args, sub_kwargs, - sub_format, keywords, - buffers + 0, buffers + 1, buffers + 2, buffers + 3, - buffers + 4, buffers + 5, buffers + 6, buffers + 7); - - if (result) { - return_value = Py_None; - Py_INCREF(Py_None); - } - -exit: - size = sizeof(converted) / sizeof(converted[0]); - for (i = 0; i < size; i++) { - Py_XDECREF(converted[i]); - } - return return_value; -} - -static PyObject * -getargs_w_star(PyObject *self, PyObject *args) -{ - Py_buffer buffer; - PyObject *result; - char *str; - - if (!PyArg_ParseTuple(args, "w*:getargs_w_star", &buffer)) - return NULL; - - if (2 <= buffer.len) { - str = buffer.buf; - str[0] = '['; - str[buffer.len-1] = ']'; - } - - result = PyBytes_FromStringAndSize(buffer.buf, buffer.len); - PyBuffer_Release(&buffer); - return result; -} - - -static PyObject * -test_empty_argparse(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - /* Test that formats can begin with '|'. See issue #4720. 
*/ - PyObject *tuple, *dict = NULL; - static char *kwlist[] = {NULL}; - int result; - tuple = PyTuple_New(0); - if (!tuple) - return NULL; - if (!(result = PyArg_ParseTuple(tuple, "|:test_empty_argparse"))) { - goto done; - } - dict = PyDict_New(); - if (!dict) - goto done; - result = PyArg_ParseTupleAndKeywords(tuple, dict, "|:test_empty_argparse", kwlist); - done: - Py_DECREF(tuple); - Py_XDECREF(dict); - if (!result) { - return NULL; - } - else { - Py_RETURN_NONE; - } -} - -/* Simple test of _PyLong_NumBits and _PyLong_Sign. */ -static PyObject * -test_long_numbits(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - struct triple { - long input; - size_t nbits; - int sign; - } testcases[] = {{0, 0, 0}, - {1L, 1, 1}, - {-1L, 1, -1}, - {2L, 2, 1}, - {-2L, 2, -1}, - {3L, 2, 1}, - {-3L, 2, -1}, - {4L, 3, 1}, - {-4L, 3, -1}, - {0x7fffL, 15, 1}, /* one Python int digit */ - {-0x7fffL, 15, -1}, - {0xffffL, 16, 1}, - {-0xffffL, 16, -1}, - {0xfffffffL, 28, 1}, - {-0xfffffffL, 28, -1}}; - size_t i; - - for (i = 0; i < Py_ARRAY_LENGTH(testcases); ++i) { - size_t nbits; - int sign; - PyObject *plong; - - plong = PyLong_FromLong(testcases[i].input); - if (plong == NULL) - return NULL; - nbits = _PyLong_NumBits(plong); - sign = _PyLong_Sign(plong); - - Py_DECREF(plong); - if (nbits != testcases[i].nbits) - return raiseTestError("test_long_numbits", - "wrong result for _PyLong_NumBits"); - if (sign != testcases[i].sign) - return raiseTestError("test_long_numbits", - "wrong result for _PyLong_Sign"); - } - Py_RETURN_NONE; -} - -static PyObject * -pyobject_repr_from_null(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - return PyObject_Repr(NULL); -} - -static PyObject * -pyobject_str_from_null(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - return PyObject_Str(NULL); -} - -static PyObject * -pyobject_bytes_from_null(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - return PyObject_Bytes(NULL); -} - -static PyObject * -raise_exception(PyObject *self, PyObject *args) -{ - PyObject *exc; - PyObject *exc_args, *v; - int num_args, i; - - if (!PyArg_ParseTuple(args, "Oi:raise_exception", - &exc, &num_args)) - return NULL; - - exc_args = PyTuple_New(num_args); - if (exc_args == NULL) - return NULL; - for (i = 0; i < num_args; ++i) { - v = PyLong_FromLong(i); - if (v == NULL) { - Py_DECREF(exc_args); - return NULL; - } - PyTuple_SET_ITEM(exc_args, i, v); - } - PyErr_SetObject(exc, exc_args); - Py_DECREF(exc_args); - return NULL; -} - -static PyObject * -set_errno(PyObject *self, PyObject *args) -{ - int new_errno; - - if (!PyArg_ParseTuple(args, "i:set_errno", &new_errno)) - return NULL; - - errno = new_errno; - Py_RETURN_NONE; -} - -static PyObject * -test_set_exception(PyObject *self, PyObject *new_exc) -{ - PyObject *exc = PyErr_GetHandledException(); - assert(PyExceptionInstance_Check(exc) || exc == NULL); - - PyErr_SetHandledException(new_exc); - return exc; -} - -static PyObject * -test_set_exc_info(PyObject *self, PyObject *args) -{ - PyObject *orig_exc; - PyObject *new_type, *new_value, *new_tb; - PyObject *type, *value, *tb; - if (!PyArg_ParseTuple(args, "OOO:test_set_exc_info", - &new_type, &new_value, &new_tb)) - return NULL; - - PyErr_GetExcInfo(&type, &value, &tb); - - Py_INCREF(new_type); - Py_INCREF(new_value); - Py_INCREF(new_tb); - PyErr_SetExcInfo(new_type, new_value, new_tb); - - orig_exc = PyTuple_Pack(3, type ? type : Py_None, value ? value : Py_None, tb ? 
tb : Py_None); - Py_XDECREF(type); - Py_XDECREF(value); - Py_XDECREF(tb); - return orig_exc; -} - -static int test_run_counter = 0; - -static PyObject * -test_datetime_capi(PyObject *self, PyObject *args) { - if (PyDateTimeAPI) { - if (test_run_counter) { - /* Probably regrtest.py -R */ - Py_RETURN_NONE; - } - else { - PyErr_SetString(PyExc_AssertionError, - "PyDateTime_CAPI somehow initialized"); - return NULL; - } - } - test_run_counter++; - PyDateTime_IMPORT; - - if (PyDateTimeAPI) - Py_RETURN_NONE; - else - return NULL; -} - -/* Functions exposing the C API type checking for testing */ -#define MAKE_DATETIME_CHECK_FUNC(check_method, exact_method) \ - PyObject *obj; \ - int exact = 0; \ - if (!PyArg_ParseTuple(args, "O|p", &obj, &exact)) { \ - return NULL; \ - } \ - int rv = exact?exact_method(obj):check_method(obj); \ - if (rv) { \ - Py_RETURN_TRUE; \ - } else { \ - Py_RETURN_FALSE; \ - } - -static PyObject * -datetime_check_date(PyObject *self, PyObject *args) { - MAKE_DATETIME_CHECK_FUNC(PyDate_Check, PyDate_CheckExact) -} - -static PyObject * -datetime_check_time(PyObject *self, PyObject *args) { - MAKE_DATETIME_CHECK_FUNC(PyTime_Check, PyTime_CheckExact) -} - -static PyObject * -datetime_check_datetime(PyObject *self, PyObject *args) { - MAKE_DATETIME_CHECK_FUNC(PyDateTime_Check, PyDateTime_CheckExact) -} - -static PyObject * -datetime_check_delta(PyObject *self, PyObject *args) { - MAKE_DATETIME_CHECK_FUNC(PyDelta_Check, PyDelta_CheckExact) -} - -static PyObject * -datetime_check_tzinfo(PyObject *self, PyObject *args) { - MAKE_DATETIME_CHECK_FUNC(PyTZInfo_Check, PyTZInfo_CheckExact) -} - - -/* Makes three variations on timezone representing UTC-5: - 1. timezone with offset and name from PyDateTimeAPI - 2. timezone with offset and name from PyTimeZone_FromOffsetAndName - 3. 
timezone with offset (no name) from PyTimeZone_FromOffset
-*/
-static PyObject *
-make_timezones_capi(PyObject *self, PyObject *args) {
-    PyObject *offset = PyDelta_FromDSU(0, -18000, 0);
-    PyObject *name = PyUnicode_FromString("EST");
-
-    PyObject *est_zone_capi = PyDateTimeAPI->TimeZone_FromTimeZone(offset, name);
-    PyObject *est_zone_macro = PyTimeZone_FromOffsetAndName(offset, name);
-    PyObject *est_zone_macro_noname = PyTimeZone_FromOffset(offset);
-
-    Py_DecRef(offset);
-    Py_DecRef(name);
-
-    PyObject *rv = PyTuple_New(3);
-
-    PyTuple_SET_ITEM(rv, 0, est_zone_capi);
-    PyTuple_SET_ITEM(rv, 1, est_zone_macro);
-    PyTuple_SET_ITEM(rv, 2, est_zone_macro_noname);
-
-    return rv;
-}
-
-static PyObject *
-get_timezones_offset_zero(PyObject *self, PyObject *args) {
-    PyObject *offset = PyDelta_FromDSU(0, 0, 0);
-    PyObject *name = PyUnicode_FromString("");
-
-    // These two should return the UTC singleton
-    PyObject *utc_singleton_0 = PyTimeZone_FromOffset(offset);
-    PyObject *utc_singleton_1 = PyTimeZone_FromOffsetAndName(offset, NULL);
-
-    // This one will return +00:00 zone, but not the UTC singleton
-    PyObject *non_utc_zone = PyTimeZone_FromOffsetAndName(offset, name);
-
-    Py_DecRef(offset);
-    Py_DecRef(name);
-
-    PyObject *rv = PyTuple_New(3);
-    PyTuple_SET_ITEM(rv, 0, utc_singleton_0);
-    PyTuple_SET_ITEM(rv, 1, utc_singleton_1);
-    PyTuple_SET_ITEM(rv, 2, non_utc_zone);
-
-    return rv;
-}
-
-static PyObject *
-get_timezone_utc_capi(PyObject* self, PyObject *args) {
-    int macro = 0;
-    if (!PyArg_ParseTuple(args, "|p", &macro)) {
-        return NULL;
-    }
-    if (macro) {
-        Py_INCREF(PyDateTime_TimeZone_UTC);
-        return PyDateTime_TimeZone_UTC;
-    } else {
-        Py_INCREF(PyDateTimeAPI->TimeZone_UTC);
-        return PyDateTimeAPI->TimeZone_UTC;
-    }
-}
-
-static PyObject *
-get_date_fromdate(PyObject *self, PyObject *args)
-{
-    PyObject *rv = NULL;
-    int macro;
-    int year, month, day;
-
-    if (!PyArg_ParseTuple(args, "piii", &macro, &year, &month, &day)) {
-        return NULL;
-    }
-
-    if (macro) {
-        rv = PyDate_FromDate(year, month, day);
-    }
-    else {
-        rv = PyDateTimeAPI->Date_FromDate(
-            year, month, day,
-            PyDateTimeAPI->DateType);
-    }
-    return rv;
-}
-
-static PyObject *
-get_datetime_fromdateandtime(PyObject *self, PyObject *args)
-{
-    PyObject *rv = NULL;
-    int macro;
-    int year, month, day;
-    int hour, minute, second, microsecond;
-
-    if (!PyArg_ParseTuple(args, "piiiiiii",
-                          &macro,
-                          &year, &month, &day,
-                          &hour, &minute, &second, &microsecond)) {
-        return NULL;
-    }
-
-    if (macro) {
-        rv = PyDateTime_FromDateAndTime(
-            year, month, day,
-            hour, minute, second, microsecond);
-    }
-    else {
-        rv = PyDateTimeAPI->DateTime_FromDateAndTime(
-            year, month, day,
-            hour, minute, second, microsecond,
-            Py_None,
-            PyDateTimeAPI->DateTimeType);
-    }
-    return rv;
-}
-
-static PyObject *
-get_datetime_fromdateandtimeandfold(PyObject *self, PyObject *args)
-{
-    PyObject *rv = NULL;
-    int macro;
-    int year, month, day;
-    int hour, minute, second, microsecond, fold;
-
-    if (!PyArg_ParseTuple(args, "piiiiiiii",
-                          &macro,
-                          &year, &month, &day,
-                          &hour, &minute, &second, &microsecond,
-                          &fold)) {
-        return NULL;
-    }
-
-    if (macro) {
-        rv = PyDateTime_FromDateAndTimeAndFold(
-            year, month, day,
-            hour, minute, second, microsecond,
-            fold);
-    }
-    else {
-        rv = PyDateTimeAPI->DateTime_FromDateAndTimeAndFold(
-            year, month, day,
-            hour, minute, second, microsecond,
-            Py_None,
-            fold,
-            PyDateTimeAPI->DateTimeType);
-    }
-    return rv;
-}
-
-static PyObject *
-get_time_fromtime(PyObject *self, PyObject *args)
-{
-    PyObject *rv = NULL;
-    int macro;
-    int hour, minute, second, microsecond;
-
-    if (!PyArg_ParseTuple(args, "piiii",
-                          &macro,
-                          &hour, &minute, &second, &microsecond)) {
-        return NULL;
-    }
-
-    if (macro) {
-        rv = PyTime_FromTime(hour, minute, second, microsecond);
-    }
-    else {
-        rv = PyDateTimeAPI->Time_FromTime(
-            hour, minute, second, microsecond,
-            Py_None,
-            PyDateTimeAPI->TimeType);
-    }
-    return rv;
-}
-
-static PyObject *
-get_time_fromtimeandfold(PyObject *self, PyObject *args)
-{
-    PyObject *rv = NULL;
-    int macro;
-    int hour, minute, second, microsecond, fold;
-
-    if (!PyArg_ParseTuple(args, "piiiii",
-                          &macro,
-                          &hour, &minute, &second, &microsecond,
-                          &fold)) {
-        return NULL;
-    }
-
-    if (macro) {
-        rv = PyTime_FromTimeAndFold(hour, minute, second, microsecond, fold);
-    }
-    else {
-        rv = PyDateTimeAPI->Time_FromTimeAndFold(
-            hour, minute, second, microsecond,
-            Py_None,
-            fold,
-            PyDateTimeAPI->TimeType);
-    }
-    return rv;
-}
-
-static PyObject *
-get_delta_fromdsu(PyObject *self, PyObject *args)
-{
-    PyObject *rv = NULL;
-    int macro;
-    int days, seconds, microseconds;
-
-    if (!PyArg_ParseTuple(args, "piii",
-                          &macro,
-                          &days, &seconds, &microseconds)) {
-        return NULL;
-    }
-
-    if (macro) {
-        rv = PyDelta_FromDSU(days, seconds, microseconds);
-    }
-    else {
-        rv = PyDateTimeAPI->Delta_FromDelta(
-            days, seconds, microseconds, 1,
-            PyDateTimeAPI->DeltaType);
-    }
-
-    return rv;
-}
-
-static PyObject *
-get_date_fromtimestamp(PyObject* self, PyObject *args)
-{
-    PyObject *tsargs = NULL, *ts = NULL, *rv = NULL;
-    int macro = 0;
-
-    if (!PyArg_ParseTuple(args, "O|p", &ts, &macro)) {
-        return NULL;
-    }
-
-    // Construct the argument tuple
-    if ((tsargs = PyTuple_Pack(1, ts)) == NULL) {
-        return NULL;
-    }
-
-    // Pass along to the API function
-    if (macro) {
-        rv = PyDate_FromTimestamp(tsargs);
-    }
-    else {
-        rv = PyDateTimeAPI->Date_FromTimestamp(
-            (PyObject *)PyDateTimeAPI->DateType, tsargs
-        );
-    }
-
-    Py_DECREF(tsargs);
-    return rv;
-}
-
-static PyObject *
-get_datetime_fromtimestamp(PyObject* self, PyObject *args)
-{
-    int macro = 0;
-    int usetz = 0;
-    PyObject *tsargs = NULL, *ts = NULL, *tzinfo = Py_None, *rv = NULL;
-    if (!PyArg_ParseTuple(args, "OO|pp", &ts, &tzinfo, &usetz, &macro)) {
-        return NULL;
-    }
-
-    // Construct the argument tuple
-    if (usetz) {
-        tsargs = PyTuple_Pack(2, ts, tzinfo);
-    }
-    else {
-        tsargs = PyTuple_Pack(1, ts);
-    }
-
-    if (tsargs == NULL) {
-        return NULL;
-    }
-
-    // Pass along to the API function
-    if (macro) {
-        rv = PyDateTime_FromTimestamp(tsargs);
-    }
-    else {
-        rv = PyDateTimeAPI->DateTime_FromTimestamp(
-            (PyObject *)PyDateTimeAPI->DateTimeType, tsargs, NULL
-        );
-    }
-
-    Py_DECREF(tsargs);
-    return rv;
-}
-
-static PyObject *
-test_PyDateTime_GET(PyObject *self, PyObject *obj)
-{
-    int year, month, day;
-
-    year = PyDateTime_GET_YEAR(obj);
-    month = PyDateTime_GET_MONTH(obj);
-    day = PyDateTime_GET_DAY(obj);
-
-    return Py_BuildValue("(iii)", year, month, day);
-}
-
-static PyObject *
-test_PyDateTime_DATE_GET(PyObject *self, PyObject *obj)
-{
-    int hour, minute, second, microsecond;
-
-    hour = PyDateTime_DATE_GET_HOUR(obj);
-    minute = PyDateTime_DATE_GET_MINUTE(obj);
-    second = PyDateTime_DATE_GET_SECOND(obj);
-    microsecond = PyDateTime_DATE_GET_MICROSECOND(obj);
-    PyObject *tzinfo = PyDateTime_DATE_GET_TZINFO(obj);
-
-    return Py_BuildValue("(iiiiO)", hour, minute, second, microsecond, tzinfo);
-}
-
-static PyObject *
-test_PyDateTime_TIME_GET(PyObject *self, PyObject *obj)
-{
-    int hour, minute, second, microsecond;
-
-    hour = PyDateTime_TIME_GET_HOUR(obj);
-    minute = PyDateTime_TIME_GET_MINUTE(obj);
-    second = 
PyDateTime_TIME_GET_SECOND(obj); - microsecond = PyDateTime_TIME_GET_MICROSECOND(obj); - PyObject *tzinfo = PyDateTime_TIME_GET_TZINFO(obj); - - return Py_BuildValue("(iiiiO)", hour, minute, second, microsecond, tzinfo); -} - -static PyObject * -test_PyDateTime_DELTA_GET(PyObject *self, PyObject *obj) -{ - int days, seconds, microseconds; - - days = PyDateTime_DELTA_GET_DAYS(obj); - seconds = PyDateTime_DELTA_GET_SECONDS(obj); - microseconds = PyDateTime_DELTA_GET_MICROSECONDS(obj); - - return Py_BuildValue("(iii)", days, seconds, microseconds); -} - -/* test_thread_state spawns a thread of its own, and that thread releases - * `thread_done` when it's finished. The driver code has to know when the - * thread finishes, because the thread uses a PyObject (the callable) that - * may go away when the driver finishes. The former lack of this explicit - * synchronization caused rare segfaults, so rare that they were seen only - * on a Mac buildbot (although they were possible on any box). - */ -static PyThread_type_lock thread_done = NULL; - -static int -_make_call(void *callable) -{ - PyObject *rc; - int success; - PyGILState_STATE s = PyGILState_Ensure(); - rc = PyObject_CallNoArgs((PyObject *)callable); - success = (rc != NULL); - Py_XDECREF(rc); - PyGILState_Release(s); - return success; -} - -/* Same thing, but releases `thread_done` when it returns. This variant - * should be called only from threads spawned by test_thread_state(). - */ -static void -_make_call_from_thread(void *callable) -{ - _make_call(callable); - PyThread_release_lock(thread_done); -} - -static PyObject * -test_thread_state(PyObject *self, PyObject *args) -{ - PyObject *fn; - int success = 1; - - if (!PyArg_ParseTuple(args, "O:test_thread_state", &fn)) - return NULL; - - if (!PyCallable_Check(fn)) { - PyErr_Format(PyExc_TypeError, "'%s' object is not callable", - Py_TYPE(fn)->tp_name); - return NULL; - } - - thread_done = PyThread_allocate_lock(); - if (thread_done == NULL) - return PyErr_NoMemory(); - PyThread_acquire_lock(thread_done, 1); - - /* Start a new thread with our callback. */ - PyThread_start_new_thread(_make_call_from_thread, fn); - /* Make the callback with the thread lock held by this thread */ - success &= _make_call(fn); - /* Do it all again, but this time with the thread-lock released */ - Py_BEGIN_ALLOW_THREADS - success &= _make_call(fn); - PyThread_acquire_lock(thread_done, 1); /* wait for thread to finish */ - Py_END_ALLOW_THREADS + /* Start a new thread with our callback. */ + PyThread_start_new_thread(_make_call_from_thread, fn); + /* Make the callback with the thread lock held by this thread */ + success &= _make_call(fn); + /* Do it all again, but this time with the thread-lock released */ + Py_BEGIN_ALLOW_THREADS + success &= _make_call(fn); + PyThread_acquire_lock(thread_done, 1); /* wait for thread to finish */ + Py_END_ALLOW_THREADS /* And once more with and without a thread XXX - should use a lock and work out exactly what we are trying @@ -2598,13 +906,6 @@ pending_threadfunc(PyObject *self, PyObject *arg) Py_RETURN_TRUE; } -/* This is here to provide a docstring for test_descr. */ -static PyObject * -test_with_docstring(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - Py_RETURN_NONE; -} - /* Test PyOS_string_to_double. */ static PyObject * test_string_to_double(PyObject *self, PyObject *Py_UNUSED(ignored)) { @@ -2985,8 +1286,7 @@ failing_converter(PyObject *obj, void *arg) { /* Clone str1, then let the conversion fail. 
*/ assert(str1); - str2 = str1; - Py_INCREF(str2); + str2 = Py_NewRef(str1); return 0; } static PyObject* @@ -3295,70 +1595,9 @@ run_in_subinterp_with_config(PyObject *self, PyObject *args, PyObject *kwargs) return PyLong_FromLong(r); } -static int -check_time_rounding(int round) -{ - if (round != _PyTime_ROUND_FLOOR - && round != _PyTime_ROUND_CEILING - && round != _PyTime_ROUND_HALF_EVEN - && round != _PyTime_ROUND_UP) { - PyErr_SetString(PyExc_ValueError, "invalid rounding"); - return -1; - } - return 0; -} - -static PyObject * -test_pytime_object_to_time_t(PyObject *self, PyObject *args) -{ - PyObject *obj; - time_t sec; - int round; - if (!PyArg_ParseTuple(args, "Oi:pytime_object_to_time_t", &obj, &round)) - return NULL; - if (check_time_rounding(round) < 0) - return NULL; - if (_PyTime_ObjectToTime_t(obj, &sec, round) == -1) - return NULL; - return _PyLong_FromTime_t(sec); -} - -static PyObject * -test_pytime_object_to_timeval(PyObject *self, PyObject *args) -{ - PyObject *obj; - time_t sec; - long usec; - int round; - if (!PyArg_ParseTuple(args, "Oi:pytime_object_to_timeval", &obj, &round)) - return NULL; - if (check_time_rounding(round) < 0) - return NULL; - if (_PyTime_ObjectToTimeval(obj, &sec, &usec, round) == -1) - return NULL; - return Py_BuildValue("Nl", _PyLong_FromTime_t(sec), usec); -} - -static PyObject * -test_pytime_object_to_timespec(PyObject *self, PyObject *args) -{ - PyObject *obj; - time_t sec; - long nsec; - int round; - if (!PyArg_ParseTuple(args, "Oi:pytime_object_to_timespec", &obj, &round)) - return NULL; - if (check_time_rounding(round) < 0) - return NULL; - if (_PyTime_ObjectToTimespec(obj, &sec, &nsec, round) == -1) - return NULL; - return Py_BuildValue("Nl", _PyLong_FromTime_t(sec), nsec); -} - static void slot_tp_del(PyObject *self) { - _Py_IDENTIFIER(__tp_del__); PyObject *del, *res; PyObject *error_type, *error_value, *error_traceback; @@ -3369,15 +1608,21 @@ slot_tp_del(PyObject *self) /* Save the current exception, if any. */ PyErr_Fetch(&error_type, &error_value, &error_traceback); + PyObject *tp_del = PyUnicode_InternFromString("__tp_del__"); + if (tp_del == NULL) { + PyErr_WriteUnraisable(NULL); + PyErr_Restore(error_type, error_value, error_traceback); + return; + } /* Execute __del__ method, if any. */ - del = _PyObject_LookupSpecialId(self, &PyId___tp_del__); + del = _PyType_Lookup(Py_TYPE(self), tp_del); + Py_DECREF(tp_del); if (del != NULL) { - res = PyObject_CallNoArgs(del); + res = PyObject_CallOneArg(del, self); if (res == NULL) PyErr_WriteUnraisable(del); else Py_DECREF(res); - Py_DECREF(del); } /* Restore the saved exception. 
*/ @@ -3424,8 +1669,7 @@ with_tp_del(PyObject *self, PyObject *args) return NULL; } tp->tp_del = slot_tp_del; - Py_INCREF(obj); - return obj; + return Py_NewRef(obj); } static PyObject * @@ -3443,8 +1687,7 @@ without_gc(PyObject *Py_UNUSED(self), PyObject *obj) tp->tp_clear = NULL; } assert(!PyType_IS_GC(tp)); - Py_INCREF(obj); - return obj; + return Py_NewRef(obj); } static PyMethodDef ml; @@ -3465,8 +1708,7 @@ static PyMethodDef ml = { static PyObject * _test_incref(PyObject *ob) { - Py_INCREF(ob); - return ob; + return Py_NewRef(ob); } static PyObject * @@ -3482,549 +1724,83 @@ test_xincref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) static PyObject * test_incref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) -{ - PyObject *obj = PyLong_FromLong(0); - Py_INCREF(_test_incref(obj)); - Py_DECREF(obj); - Py_DECREF(obj); - Py_DECREF(obj); - Py_RETURN_NONE; -} - -static PyObject * -test_xdecref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) -{ - Py_XDECREF(PyLong_FromLong(0)); - Py_RETURN_NONE; -} - -static PyObject * -test_decref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) -{ - Py_DECREF(PyLong_FromLong(0)); - Py_RETURN_NONE; -} - -static PyObject * -test_structseq_newtype_doesnt_leak(PyObject *Py_UNUSED(self), - PyObject *Py_UNUSED(args)) -{ - PyStructSequence_Desc descr; - PyStructSequence_Field descr_fields[3]; - - descr_fields[0] = (PyStructSequence_Field){"foo", "foo value"}; - descr_fields[1] = (PyStructSequence_Field){NULL, "some hidden value"}; - descr_fields[2] = (PyStructSequence_Field){0, NULL}; - - descr.name = "_testcapi.test_descr"; - descr.doc = "This is used to test for memory leaks in NewType"; - descr.fields = descr_fields; - descr.n_in_sequence = 1; - - PyTypeObject* structseq_type = PyStructSequence_NewType(&descr); - assert(structseq_type != NULL); - assert(PyType_Check(structseq_type)); - assert(PyType_FastSubclass(structseq_type, Py_TPFLAGS_TUPLE_SUBCLASS)); - Py_DECREF(structseq_type); - - Py_RETURN_NONE; -} - -static PyObject * -test_structseq_newtype_null_descr_doc(PyObject *Py_UNUSED(self), - PyObject *Py_UNUSED(args)) -{ - PyStructSequence_Field descr_fields[1] = { - (PyStructSequence_Field){NULL, NULL} - }; - // Test specifically for NULL .doc field. 
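/*
 * A minimal standalone sketch of the struct sequence API exercised by the two
 * tests above: PyStructSequence_Desc describes a tuple subclass with named
 * fields, PyStructSequence_NewType() creates the type, and
 * PyStructSequence_SetItem() fills an instance (stealing each value
 * reference).  The "example.point_result" type and make_point() helper are
 * illustrative only, not part of this patch.
 */
#include <Python.h>

static PyStructSequence_Field point_fields[] = {
    {"x", "horizontal coordinate"},
    {"y", "vertical coordinate"},
    {NULL, NULL}                            /* sentinel ends the field list */
};

static PyStructSequence_Desc point_desc = {
    .name = "example.point_result",         /* fully qualified type name */
    .doc = "A named, immutable (x, y) pair.",
    .fields = point_fields,
    .n_in_sequence = 2,                     /* both fields visible as tuple items */
};

static PyObject *
make_point(long x, long y)
{
    /* In real code the type would be created once, e.g. at module init. */
    PyTypeObject *point_type = PyStructSequence_NewType(&point_desc);
    if (point_type == NULL) {
        return NULL;
    }

    PyObject *point = PyStructSequence_New(point_type);
    Py_DECREF(point_type);                  /* the instance keeps the type alive */
    if (point == NULL) {
        return NULL;
    }

    PyObject *x_obj = PyLong_FromLong(x);
    PyObject *y_obj = PyLong_FromLong(y);
    if (x_obj == NULL || y_obj == NULL) {
        Py_XDECREF(x_obj);
        Py_XDECREF(y_obj);
        Py_DECREF(point);
        return NULL;
    }
    PyStructSequence_SetItem(point, 0, x_obj);   /* steals the reference */
    PyStructSequence_SetItem(point, 1, y_obj);
    return point;                           /* usable as point[0] or point.x */
}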
- PyStructSequence_Desc descr = {"_testcapi.test_descr", NULL, &descr_fields[0], 0}; - - PyTypeObject* structseq_type = PyStructSequence_NewType(&descr); - assert(structseq_type != NULL); - assert(PyType_Check(structseq_type)); - assert(PyType_FastSubclass(structseq_type, Py_TPFLAGS_TUPLE_SUBCLASS)); - Py_DECREF(structseq_type); - - Py_RETURN_NONE; -} - -static PyObject * -test_incref_decref_API(PyObject *ob, PyObject *Py_UNUSED(ignored)) -{ - PyObject *obj = PyLong_FromLong(0); - Py_IncRef(obj); - Py_DecRef(obj); - Py_DecRef(obj); - Py_RETURN_NONE; -} - -static PyObject * -test_pymem_alloc0(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - void *ptr; - - ptr = PyMem_RawMalloc(0); - if (ptr == NULL) { - PyErr_SetString(PyExc_RuntimeError, "PyMem_RawMalloc(0) returns NULL"); - return NULL; - } - PyMem_RawFree(ptr); - - ptr = PyMem_RawCalloc(0, 0); - if (ptr == NULL) { - PyErr_SetString(PyExc_RuntimeError, "PyMem_RawCalloc(0, 0) returns NULL"); - return NULL; - } - PyMem_RawFree(ptr); - - ptr = PyMem_Malloc(0); - if (ptr == NULL) { - PyErr_SetString(PyExc_RuntimeError, "PyMem_Malloc(0) returns NULL"); - return NULL; - } - PyMem_Free(ptr); - - ptr = PyMem_Calloc(0, 0); - if (ptr == NULL) { - PyErr_SetString(PyExc_RuntimeError, "PyMem_Calloc(0, 0) returns NULL"); - return NULL; - } - PyMem_Free(ptr); - - ptr = PyObject_Malloc(0); - if (ptr == NULL) { - PyErr_SetString(PyExc_RuntimeError, "PyObject_Malloc(0) returns NULL"); - return NULL; - } - PyObject_Free(ptr); - - ptr = PyObject_Calloc(0, 0); - if (ptr == NULL) { - PyErr_SetString(PyExc_RuntimeError, "PyObject_Calloc(0, 0) returns NULL"); - return NULL; - } - PyObject_Free(ptr); - - Py_RETURN_NONE; -} - -static PyObject * -test_pyobject_new(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - PyObject *obj; - PyTypeObject *type = &PyBaseObject_Type; - PyTypeObject *var_type = &PyLong_Type; - - // PyObject_New() - obj = PyObject_New(PyObject, type); - if (obj == NULL) { - goto alloc_failed; - } - Py_DECREF(obj); - - // PyObject_NEW() - obj = PyObject_NEW(PyObject, type); - if (obj == NULL) { - goto alloc_failed; - } - Py_DECREF(obj); - - // PyObject_NewVar() - obj = PyObject_NewVar(PyObject, var_type, 3); - if (obj == NULL) { - goto alloc_failed; - } - Py_DECREF(obj); - - // PyObject_NEW_VAR() - obj = PyObject_NEW_VAR(PyObject, var_type, 3); - if (obj == NULL) { - goto alloc_failed; - } - Py_DECREF(obj); - - Py_RETURN_NONE; - -alloc_failed: - PyErr_NoMemory(); - return NULL; -} - -typedef struct { - PyMemAllocatorEx alloc; - - size_t malloc_size; - size_t calloc_nelem; - size_t calloc_elsize; - void *realloc_ptr; - size_t realloc_new_size; - void *free_ptr; - void *ctx; -} alloc_hook_t; - -static void* hook_malloc(void* ctx, size_t size) -{ - alloc_hook_t *hook = (alloc_hook_t *)ctx; - hook->ctx = ctx; - hook->malloc_size = size; - return hook->alloc.malloc(hook->alloc.ctx, size); -} - -static void* hook_calloc(void* ctx, size_t nelem, size_t elsize) -{ - alloc_hook_t *hook = (alloc_hook_t *)ctx; - hook->ctx = ctx; - hook->calloc_nelem = nelem; - hook->calloc_elsize = elsize; - return hook->alloc.calloc(hook->alloc.ctx, nelem, elsize); -} - -static void* hook_realloc(void* ctx, void* ptr, size_t new_size) -{ - alloc_hook_t *hook = (alloc_hook_t *)ctx; - hook->ctx = ctx; - hook->realloc_ptr = ptr; - hook->realloc_new_size = new_size; - return hook->alloc.realloc(hook->alloc.ctx, ptr, new_size); -} - -static void hook_free(void *ctx, void *ptr) -{ - alloc_hook_t *hook = (alloc_hook_t *)ctx; - hook->ctx = ctx; - hook->free_ptr = ptr; - 
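/*
 * A minimal standalone sketch of the allocator-hooking pattern used by
 * alloc_hook_t above, reduced to a simple counter: the current allocator is
 * saved with PyMem_GetAllocator(), each hook records the call and then
 * delegates to the saved allocator, and PyMem_SetAllocator() restores the
 * original when the hook is removed.  The count_hook_t, install_count_hook()
 * and remove_count_hook() names are illustrative only, not part of this patch.
 */
#include <Python.h>

typedef struct {
    PyMemAllocatorEx inner;     /* the allocator we wrap and delegate to */
    size_t mallocs;
    size_t frees;
} count_hook_t;

static count_hook_t count_hook;

static void *
count_malloc(void *ctx, size_t size)
{
    count_hook_t *hook = (count_hook_t *)ctx;
    hook->mallocs++;
    return hook->inner.malloc(hook->inner.ctx, size);
}

static void *
count_calloc(void *ctx, size_t nelem, size_t elsize)
{
    count_hook_t *hook = (count_hook_t *)ctx;
    hook->mallocs++;
    return hook->inner.calloc(hook->inner.ctx, nelem, elsize);
}

static void *
count_realloc(void *ctx, void *ptr, size_t new_size)
{
    count_hook_t *hook = (count_hook_t *)ctx;
    return hook->inner.realloc(hook->inner.ctx, ptr, new_size);
}

static void
count_free(void *ctx, void *ptr)
{
    count_hook_t *hook = (count_hook_t *)ctx;
    hook->frees++;
    hook->inner.free(hook->inner.ctx, ptr);
}

static void
install_count_hook(void)
{
    PyMemAllocatorEx alloc = {
        .ctx = &count_hook,
        .malloc = count_malloc,
        .calloc = count_calloc,
        .realloc = count_realloc,
        .free = count_free,
    };
    /* The PYMEM_DOMAIN_MEM functions are called with the GIL held, so plain
       counters are sufficient here.  Save the current allocator first so the
       hooks can delegate to it. */
    PyMem_GetAllocator(PYMEM_DOMAIN_MEM, &count_hook.inner);
    PyMem_SetAllocator(PYMEM_DOMAIN_MEM, &alloc);
}

static void
remove_count_hook(void)
{
    /* Put the original allocator back; the hook must not stay installed
       once the structure it points to is no longer valid. */
    PyMem_SetAllocator(PYMEM_DOMAIN_MEM, &count_hook.inner);
}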
hook->alloc.free(hook->alloc.ctx, ptr); -} - -static PyObject * -test_setallocators(PyMemAllocatorDomain domain) -{ - PyObject *res = NULL; - const char *error_msg; - alloc_hook_t hook; - PyMemAllocatorEx alloc; - size_t size, size2, nelem, elsize; - void *ptr, *ptr2; - - memset(&hook, 0, sizeof(hook)); - - alloc.ctx = &hook; - alloc.malloc = &hook_malloc; - alloc.calloc = &hook_calloc; - alloc.realloc = &hook_realloc; - alloc.free = &hook_free; - PyMem_GetAllocator(domain, &hook.alloc); - PyMem_SetAllocator(domain, &alloc); - - /* malloc, realloc, free */ - size = 42; - hook.ctx = NULL; - switch(domain) - { - case PYMEM_DOMAIN_RAW: ptr = PyMem_RawMalloc(size); break; - case PYMEM_DOMAIN_MEM: ptr = PyMem_Malloc(size); break; - case PYMEM_DOMAIN_OBJ: ptr = PyObject_Malloc(size); break; - default: ptr = NULL; break; - } - -#define CHECK_CTX(FUNC) \ - if (hook.ctx != &hook) { \ - error_msg = FUNC " wrong context"; \ - goto fail; \ - } \ - hook.ctx = NULL; /* reset for next check */ - - if (ptr == NULL) { - error_msg = "malloc failed"; - goto fail; - } - CHECK_CTX("malloc"); - if (hook.malloc_size != size) { - error_msg = "malloc invalid size"; - goto fail; - } - - size2 = 200; - switch(domain) - { - case PYMEM_DOMAIN_RAW: ptr2 = PyMem_RawRealloc(ptr, size2); break; - case PYMEM_DOMAIN_MEM: ptr2 = PyMem_Realloc(ptr, size2); break; - case PYMEM_DOMAIN_OBJ: ptr2 = PyObject_Realloc(ptr, size2); break; - default: ptr2 = NULL; break; - } - - if (ptr2 == NULL) { - error_msg = "realloc failed"; - goto fail; - } - CHECK_CTX("realloc"); - if (hook.realloc_ptr != ptr - || hook.realloc_new_size != size2) { - error_msg = "realloc invalid parameters"; - goto fail; - } - - switch(domain) - { - case PYMEM_DOMAIN_RAW: PyMem_RawFree(ptr2); break; - case PYMEM_DOMAIN_MEM: PyMem_Free(ptr2); break; - case PYMEM_DOMAIN_OBJ: PyObject_Free(ptr2); break; - } - - CHECK_CTX("free"); - if (hook.free_ptr != ptr2) { - error_msg = "free invalid pointer"; - goto fail; - } - - /* calloc, free */ - nelem = 2; - elsize = 5; - switch(domain) - { - case PYMEM_DOMAIN_RAW: ptr = PyMem_RawCalloc(nelem, elsize); break; - case PYMEM_DOMAIN_MEM: ptr = PyMem_Calloc(nelem, elsize); break; - case PYMEM_DOMAIN_OBJ: ptr = PyObject_Calloc(nelem, elsize); break; - default: ptr = NULL; break; - } - - if (ptr == NULL) { - error_msg = "calloc failed"; - goto fail; - } - CHECK_CTX("calloc"); - if (hook.calloc_nelem != nelem || hook.calloc_elsize != elsize) { - error_msg = "calloc invalid nelem or elsize"; - goto fail; - } - - hook.free_ptr = NULL; - switch(domain) - { - case PYMEM_DOMAIN_RAW: PyMem_RawFree(ptr); break; - case PYMEM_DOMAIN_MEM: PyMem_Free(ptr); break; - case PYMEM_DOMAIN_OBJ: PyObject_Free(ptr); break; - } - - CHECK_CTX("calloc free"); - if (hook.free_ptr != ptr) { - error_msg = "calloc free invalid pointer"; - goto fail; - } - - Py_INCREF(Py_None); - res = Py_None; - goto finally; - -fail: - PyErr_SetString(PyExc_RuntimeError, error_msg); - -finally: - PyMem_SetAllocator(domain, &hook.alloc); - return res; - -#undef CHECK_CTX -} - -static PyObject * -test_pymem_setrawallocators(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - return test_setallocators(PYMEM_DOMAIN_RAW); -} - -static PyObject * -test_pymem_setallocators(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - return test_setallocators(PYMEM_DOMAIN_MEM); -} - -static PyObject * -test_pyobject_setallocators(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - return test_setallocators(PYMEM_DOMAIN_OBJ); -} - -/* Most part of the following code is inherited from the 
pyfailmalloc project - * written by Victor Stinner. */ -static struct { - int installed; - PyMemAllocatorEx raw; - PyMemAllocatorEx mem; - PyMemAllocatorEx obj; -} FmHook; - -static struct { - int start; - int stop; - Py_ssize_t count; -} FmData; - -static int -fm_nomemory(void) -{ - FmData.count++; - if (FmData.count > FmData.start && - (FmData.stop <= 0 || FmData.count <= FmData.stop)) { - return 1; - } - return 0; -} - -static void * -hook_fmalloc(void *ctx, size_t size) -{ - PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; - if (fm_nomemory()) { - return NULL; - } - return alloc->malloc(alloc->ctx, size); -} - -static void * -hook_fcalloc(void *ctx, size_t nelem, size_t elsize) -{ - PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; - if (fm_nomemory()) { - return NULL; - } - return alloc->calloc(alloc->ctx, nelem, elsize); +{ + PyObject *obj = PyLong_FromLong(0); + Py_INCREF(_test_incref(obj)); + Py_DECREF(obj); + Py_DECREF(obj); + Py_DECREF(obj); + Py_RETURN_NONE; } -static void * -hook_frealloc(void *ctx, void *ptr, size_t new_size) +static PyObject * +test_xdecref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) { - PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; - if (fm_nomemory()) { - return NULL; - } - return alloc->realloc(alloc->ctx, ptr, new_size); + Py_XDECREF(PyLong_FromLong(0)); + Py_RETURN_NONE; } -static void -hook_ffree(void *ctx, void *ptr) +static PyObject * +test_decref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) { - PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; - alloc->free(alloc->ctx, ptr); + Py_DECREF(PyLong_FromLong(0)); + Py_RETURN_NONE; } -static void -fm_setup_hooks(void) +static PyObject * +test_structseq_newtype_doesnt_leak(PyObject *Py_UNUSED(self), + PyObject *Py_UNUSED(args)) { - PyMemAllocatorEx alloc; - - if (FmHook.installed) { - return; - } - FmHook.installed = 1; + PyStructSequence_Desc descr; + PyStructSequence_Field descr_fields[3]; - alloc.malloc = hook_fmalloc; - alloc.calloc = hook_fcalloc; - alloc.realloc = hook_frealloc; - alloc.free = hook_ffree; - PyMem_GetAllocator(PYMEM_DOMAIN_RAW, &FmHook.raw); - PyMem_GetAllocator(PYMEM_DOMAIN_MEM, &FmHook.mem); - PyMem_GetAllocator(PYMEM_DOMAIN_OBJ, &FmHook.obj); + descr_fields[0] = (PyStructSequence_Field){"foo", "foo value"}; + descr_fields[1] = (PyStructSequence_Field){NULL, "some hidden value"}; + descr_fields[2] = (PyStructSequence_Field){0, NULL}; - alloc.ctx = &FmHook.raw; - PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &alloc); + descr.name = "_testcapi.test_descr"; + descr.doc = "This is used to test for memory leaks in NewType"; + descr.fields = descr_fields; + descr.n_in_sequence = 1; - alloc.ctx = &FmHook.mem; - PyMem_SetAllocator(PYMEM_DOMAIN_MEM, &alloc); + PyTypeObject* structseq_type = PyStructSequence_NewType(&descr); + assert(structseq_type != NULL); + assert(PyType_Check(structseq_type)); + assert(PyType_FastSubclass(structseq_type, Py_TPFLAGS_TUPLE_SUBCLASS)); + Py_DECREF(structseq_type); - alloc.ctx = &FmHook.obj; - PyMem_SetAllocator(PYMEM_DOMAIN_OBJ, &alloc); + Py_RETURN_NONE; } -static void -fm_remove_hooks(void) +static PyObject * +test_structseq_newtype_null_descr_doc(PyObject *Py_UNUSED(self), + PyObject *Py_UNUSED(args)) { - if (FmHook.installed) { - FmHook.installed = 0; - PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &FmHook.raw); - PyMem_SetAllocator(PYMEM_DOMAIN_MEM, &FmHook.mem); - PyMem_SetAllocator(PYMEM_DOMAIN_OBJ, &FmHook.obj); - } -} + PyStructSequence_Field descr_fields[1] = { + (PyStructSequence_Field){NULL, NULL} + }; + // Test specifically for NULL 
.doc field. + PyStructSequence_Desc descr = {"_testcapi.test_descr", NULL, &descr_fields[0], 0}; + + PyTypeObject* structseq_type = PyStructSequence_NewType(&descr); + assert(structseq_type != NULL); + assert(PyType_Check(structseq_type)); + assert(PyType_FastSubclass(structseq_type, Py_TPFLAGS_TUPLE_SUBCLASS)); + Py_DECREF(structseq_type); -static PyObject* -set_nomemory(PyObject *self, PyObject *args) -{ - /* Memory allocation fails after 'start' allocation requests, and until - * 'stop' allocation requests except when 'stop' is negative or equal - * to 0 (default) in which case allocation failures never stop. */ - FmData.count = 0; - FmData.stop = 0; - if (!PyArg_ParseTuple(args, "i|i", &FmData.start, &FmData.stop)) { - return NULL; - } - fm_setup_hooks(); Py_RETURN_NONE; } -static PyObject* -remove_mem_hooks(PyObject *self, PyObject *Py_UNUSED(ignored)) +static PyObject * +test_incref_decref_API(PyObject *ob, PyObject *Py_UNUSED(ignored)) { - fm_remove_hooks(); + PyObject *obj = PyLong_FromLong(0); + Py_IncRef(obj); + Py_DecRef(obj); + Py_DecRef(obj); Py_RETURN_NONE; } -PyDoc_STRVAR(docstring_empty, -"" -); - -PyDoc_STRVAR(docstring_no_signature, -"This docstring has no signature." -); - -PyDoc_STRVAR(docstring_with_invalid_signature, -"docstring_with_invalid_signature($module, /, boo)\n" -"\n" -"This docstring has an invalid signature." -); - -PyDoc_STRVAR(docstring_with_invalid_signature2, -"docstring_with_invalid_signature2($module, /, boo)\n" -"\n" -"--\n" -"\n" -"This docstring also has an invalid signature." -); - -PyDoc_STRVAR(docstring_with_signature, -"docstring_with_signature($module, /, sig)\n" -"--\n" -"\n" -"This docstring has a valid signature." -); - -PyDoc_STRVAR(docstring_with_signature_but_no_doc, -"docstring_with_signature_but_no_doc($module, /, sig)\n" -"--\n" -"\n" -); - -PyDoc_STRVAR(docstring_with_signature_and_extra_newlines, -"docstring_with_signature_and_extra_newlines($module, /, parameter)\n" -"--\n" -"\n" -"\n" -"This docstring has a valid signature and some extra newlines." -); - -PyDoc_STRVAR(docstring_with_signature_with_defaults, -"docstring_with_signature_with_defaults(module, s='avocado',\n" -" b=b'bytes', d=3.14, i=35, n=None, t=True, f=False,\n" -" local=the_number_three, sys=sys.maxsize,\n" -" exp=sys.maxsize - 1)\n" -"--\n" -"\n" -"\n" -"\n" -"This docstring has a valid signature with parameters,\n" -"and the parameters take defaults of varying types." -); - typedef struct { PyThread_type_lock start_event; PyThread_type_lock exit_event; @@ -4074,8 +1850,7 @@ call_in_temporary_c_thread(PyObject *self, PyObject *callback) goto exit; } - Py_INCREF(callback); - test_c_thread.callback = callback; + test_c_thread.callback = Py_NewRef(callback); PyThread_acquire_lock(test_c_thread.start_event, 1); PyThread_acquire_lock(test_c_thread.exit_event, 1); @@ -4096,8 +1871,7 @@ call_in_temporary_c_thread(PyObject *self, PyObject *callback) PyThread_release_lock(test_c_thread.exit_event); Py_END_ALLOW_THREADS - Py_INCREF(Py_None); - res = Py_None; + res = Py_NewRef(Py_None); exit: Py_CLEAR(test_c_thread.callback); @@ -4255,356 +2029,40 @@ pymarshal_read_object_from_file(PyObject* self, PyObject *args) } obj = PyMarshal_ReadObjectFromFile(fp); - pos = ftell(fp); - - fclose(fp); - return Py_BuildValue("Nl", obj, pos); -} - -static PyObject* -return_null_without_error(PyObject *self, PyObject *args) -{ - /* invalid call: return NULL without setting an error, - * _Py_CheckFunctionResult() must detect such bug at runtime. 
*/ - PyErr_Clear(); - return NULL; -} - -static PyObject* -return_result_with_error(PyObject *self, PyObject *args) -{ - /* invalid call: return a result with an error set, - * _Py_CheckFunctionResult() must detect such bug at runtime. */ - PyErr_SetNone(PyExc_ValueError); - Py_RETURN_NONE; -} - -static PyObject* -getitem_with_error(PyObject *self, PyObject *args) -{ - PyObject *map, *key; - if (!PyArg_ParseTuple(args, "OO", &map, &key)) { - return NULL; - } - - PyErr_SetString(PyExc_ValueError, "bug"); - return PyObject_GetItem(map, key); -} - -static PyObject * -test_pytime_fromseconds(PyObject *self, PyObject *args) -{ - int seconds; - if (!PyArg_ParseTuple(args, "i", &seconds)) { - return NULL; - } - _PyTime_t ts = _PyTime_FromSeconds(seconds); - return _PyTime_AsNanosecondsObject(ts); -} - -static PyObject * -test_pytime_fromsecondsobject(PyObject *self, PyObject *args) -{ - PyObject *obj; - int round; - if (!PyArg_ParseTuple(args, "Oi", &obj, &round)) { - return NULL; - } - if (check_time_rounding(round) < 0) { - return NULL; - } - _PyTime_t ts; - if (_PyTime_FromSecondsObject(&ts, obj, round) == -1) { - return NULL; - } - return _PyTime_AsNanosecondsObject(ts); -} - -static PyObject * -test_pytime_assecondsdouble(PyObject *self, PyObject *args) -{ - PyObject *obj; - if (!PyArg_ParseTuple(args, "O", &obj)) { - return NULL; - } - _PyTime_t ts; - if (_PyTime_FromNanosecondsObject(&ts, obj) < 0) { - return NULL; - } - double d = _PyTime_AsSecondsDouble(ts); - return PyFloat_FromDouble(d); -} - -static PyObject * -test_PyTime_AsTimeval(PyObject *self, PyObject *args) -{ - PyObject *obj; - int round; - if (!PyArg_ParseTuple(args, "Oi", &obj, &round)) { - return NULL; - } - if (check_time_rounding(round) < 0) { - return NULL; - } - _PyTime_t t; - if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { - return NULL; - } - struct timeval tv; - if (_PyTime_AsTimeval(t, &tv, round) < 0) { - return NULL; - } - - PyObject *seconds = PyLong_FromLongLong(tv.tv_sec); - if (seconds == NULL) { - return NULL; - } - return Py_BuildValue("Nl", seconds, (long)tv.tv_usec); -} - -static PyObject * -test_PyTime_AsTimeval_clamp(PyObject *self, PyObject *args) -{ - PyObject *obj; - int round; - if (!PyArg_ParseTuple(args, "Oi", &obj, &round)) { - return NULL; - } - if (check_time_rounding(round) < 0) { - return NULL; - } - _PyTime_t t; - if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { - return NULL; - } - struct timeval tv; - _PyTime_AsTimeval_clamp(t, &tv, round); - - PyObject *seconds = PyLong_FromLongLong(tv.tv_sec); - if (seconds == NULL) { - return NULL; - } - return Py_BuildValue("Nl", seconds, (long)tv.tv_usec); -} - -#ifdef HAVE_CLOCK_GETTIME -static PyObject * -test_PyTime_AsTimespec(PyObject *self, PyObject *args) -{ - PyObject *obj; - if (!PyArg_ParseTuple(args, "O", &obj)) { - return NULL; - } - _PyTime_t t; - if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { - return NULL; - } - struct timespec ts; - if (_PyTime_AsTimespec(t, &ts) == -1) { - return NULL; - } - return Py_BuildValue("Nl", _PyLong_FromTime_t(ts.tv_sec), ts.tv_nsec); -} - -static PyObject * -test_PyTime_AsTimespec_clamp(PyObject *self, PyObject *args) -{ - PyObject *obj; - if (!PyArg_ParseTuple(args, "O", &obj)) { - return NULL; - } - _PyTime_t t; - if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { - return NULL; - } - struct timespec ts; - _PyTime_AsTimespec_clamp(t, &ts); - return Py_BuildValue("Nl", _PyLong_FromTime_t(ts.tv_sec), ts.tv_nsec); -} -#endif - -static PyObject * -test_PyTime_AsMilliseconds(PyObject *self, PyObject *args) 
-{ - PyObject *obj; - int round; - if (!PyArg_ParseTuple(args, "Oi", &obj, &round)) { - return NULL; - } - _PyTime_t t; - if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { - return NULL; - } - if (check_time_rounding(round) < 0) { - return NULL; - } - _PyTime_t ms = _PyTime_AsMilliseconds(t, round); - _PyTime_t ns = _PyTime_FromNanoseconds(ms); - return _PyTime_AsNanosecondsObject(ns); -} - -static PyObject * -test_PyTime_AsMicroseconds(PyObject *self, PyObject *args) -{ - PyObject *obj; - int round; - if (!PyArg_ParseTuple(args, "Oi", &obj, &round)) { - return NULL; - } - _PyTime_t t; - if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { - return NULL; - } - if (check_time_rounding(round) < 0) { - return NULL; - } - _PyTime_t us = _PyTime_AsMicroseconds(t, round); - _PyTime_t ns = _PyTime_FromNanoseconds(us); - return _PyTime_AsNanosecondsObject(ns); -} - -static PyObject* -pymem_buffer_overflow(PyObject *self, PyObject *args) -{ - char *buffer; - - /* Deliberate buffer overflow to check that PyMem_Free() detects - the overflow when debug hooks are installed. */ - buffer = PyMem_Malloc(16); - if (buffer == NULL) { - PyErr_NoMemory(); - return NULL; - } - buffer[16] = 'x'; - PyMem_Free(buffer); - - Py_RETURN_NONE; -} - -static PyObject* -pymem_api_misuse(PyObject *self, PyObject *args) -{ - char *buffer; - - /* Deliberate misusage of Python allocators: - allococate with PyMem but release with PyMem_Raw. */ - buffer = PyMem_Malloc(16); - PyMem_RawFree(buffer); - - Py_RETURN_NONE; -} - -static PyObject* -pymem_malloc_without_gil(PyObject *self, PyObject *args) -{ - char *buffer; - - /* Deliberate bug to test debug hooks on Python memory allocators: - call PyMem_Malloc() without holding the GIL */ - Py_BEGIN_ALLOW_THREADS - buffer = PyMem_Malloc(10); - Py_END_ALLOW_THREADS - - PyMem_Free(buffer); - - Py_RETURN_NONE; -} - - -static PyObject* -test_pymem_getallocatorsname(PyObject *self, PyObject *args) -{ - const char *name = _PyMem_GetCurrentAllocatorName(); - if (name == NULL) { - PyErr_SetString(PyExc_RuntimeError, "cannot get allocators name"); - return NULL; - } - return PyUnicode_FromString(name); -} - - -static PyObject* -test_pyobject_is_freed(const char *test_name, PyObject *op) -{ - if (!_PyObject_IsFreed(op)) { - return raiseTestError(test_name, "object is not seen as freed"); - } - Py_RETURN_NONE; -} - - -static PyObject* -check_pyobject_null_is_freed(PyObject *self, PyObject *Py_UNUSED(args)) -{ - PyObject *op = NULL; - return test_pyobject_is_freed("check_pyobject_null_is_freed", op); -} - - -static PyObject* -check_pyobject_uninitialized_is_freed(PyObject *self, PyObject *Py_UNUSED(args)) -{ - PyObject *op = (PyObject *)PyObject_Malloc(sizeof(PyObject)); - if (op == NULL) { - return NULL; - } - /* Initialize reference count to avoid early crash in ceval or GC */ - Py_SET_REFCNT(op, 1); - /* object fields like ob_type are uninitialized! 
*/ - return test_pyobject_is_freed("check_pyobject_uninitialized_is_freed", op); -} + pos = ftell(fp); + fclose(fp); + return Py_BuildValue("Nl", obj, pos); +} static PyObject* -check_pyobject_forbidden_bytes_is_freed(PyObject *self, PyObject *Py_UNUSED(args)) +return_null_without_error(PyObject *self, PyObject *args) { - /* Allocate an incomplete PyObject structure: truncate 'ob_type' field */ - PyObject *op = (PyObject *)PyObject_Malloc(offsetof(PyObject, ob_type)); - if (op == NULL) { - return NULL; - } - /* Initialize reference count to avoid early crash in ceval or GC */ - Py_SET_REFCNT(op, 1); - /* ob_type field is after the memory block: part of "forbidden bytes" - when using debug hooks on memory allocators! */ - return test_pyobject_is_freed("check_pyobject_forbidden_bytes_is_freed", op); + /* invalid call: return NULL without setting an error, + * _Py_CheckFunctionResult() must detect such bug at runtime. */ + PyErr_Clear(); + return NULL; } - static PyObject* -check_pyobject_freed_is_freed(PyObject *self, PyObject *Py_UNUSED(args)) +return_result_with_error(PyObject *self, PyObject *args) { - /* This test would fail if run with the address sanitizer */ -#ifdef _Py_ADDRESS_SANITIZER + /* invalid call: return a result with an error set, + * _Py_CheckFunctionResult() must detect such bug at runtime. */ + PyErr_SetNone(PyExc_ValueError); Py_RETURN_NONE; -#else - PyObject *op = PyObject_CallNoArgs((PyObject *)&PyBaseObject_Type); - if (op == NULL) { - return NULL; - } - Py_TYPE(op)->tp_dealloc(op); - /* Reset reference count to avoid early crash in ceval or GC */ - Py_SET_REFCNT(op, 1); - /* object memory is freed! */ - return test_pyobject_is_freed("check_pyobject_freed_is_freed", op); -#endif } - static PyObject* -pyobject_malloc_without_gil(PyObject *self, PyObject *args) +getitem_with_error(PyObject *self, PyObject *args) { - char *buffer; - - /* Deliberate bug to test debug hooks on Python memory allocators: - call PyObject_Malloc() without holding the GIL */ - Py_BEGIN_ALLOW_THREADS - buffer = PyObject_Malloc(10); - Py_END_ALLOW_THREADS - - PyObject_Free(buffer); + PyObject *map, *key; + if (!PyArg_ParseTuple(args, "OO", &map, &key)) { + return NULL; + } - Py_RETURN_NONE; + PyErr_SetString(PyExc_ValueError, "bug"); + return PyObject_GetItem(map, key); } static PyObject * @@ -4699,7 +2157,6 @@ dict_get_version(PyObject *self, PyObject *args) static PyObject * raise_SIGINT_then_send_None(PyObject *self, PyObject *args) { - _Py_IDENTIFIER(send); PyGenObject *gen; if (!PyArg_ParseTuple(args, "O!", &PyGen_Type, &gen)) @@ -4716,7 +2173,7 @@ raise_SIGINT_then_send_None(PyObject *self, PyObject *args) because we check for signals before every bytecode operation. 
*/ raise(SIGINT); - return _PyObject_CallMethodIdOneArg((PyObject *)gen, &PyId_send, Py_None); + return PyObject_CallMethod((PyObject *)gen, "send", "O", Py_None); } @@ -4794,6 +2251,37 @@ mapping_has_key(PyObject* self, PyObject *args) return PyLong_FromLong(PyMapping_HasKey(context, key)); } +static PyObject * +sequence_set_slice(PyObject* self, PyObject *args) +{ + PyObject *sequence, *obj; + Py_ssize_t i1, i2; + if (!PyArg_ParseTuple(args, "OnnO", &sequence, &i1, &i2, &obj)) { + return NULL; + } + + int res = PySequence_SetSlice(sequence, i1, i2, obj); + if (res == -1) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +sequence_del_slice(PyObject* self, PyObject *args) +{ + PyObject *sequence; + Py_ssize_t i1, i2; + if (!PyArg_ParseTuple(args, "Onn", &sequence, &i1, &i2)) { + return NULL; + } + + int res = PySequence_DelSlice(sequence, i1, i2); + if (res == -1) { + return NULL; + } + Py_RETURN_NONE; +} static PyObject * test_pythread_tss_key_state(PyObject *self, PyObject *args) @@ -5003,8 +2491,7 @@ _null_to_none(PyObject* obj) if (obj == NULL) { Py_RETURN_NONE; } - Py_INCREF(obj); - return obj; + return Py_NewRef(obj); } static PyObject* @@ -5068,7 +2555,6 @@ meth_fastcall_keywords(PyObject* self, PyObject* const* args, return Py_BuildValue("NNN", _null_to_none(self), pyargs, pykwargs); } - static PyObject* pynumber_tobase(PyObject *module, PyObject *args) { @@ -5081,7 +2567,6 @@ pynumber_tobase(PyObject *module, PyObject *args) return PyNumber_ToBase(obj, base); } - static PyObject* test_set_type_size(PyObject *self, PyObject *Py_UNUSED(ignored)) { @@ -5104,6 +2589,91 @@ test_set_type_size(PyObject *self, PyObject *Py_UNUSED(ignored)) } +// Test Py_CLEAR() macro +static PyObject* +test_py_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + // simple case with a variable + PyObject *obj = PyList_New(0); + if (obj == NULL) { + return NULL; + } + Py_CLEAR(obj); + assert(obj == NULL); + + // gh-98724: complex case, Py_CLEAR() argument has a side effect + PyObject* array[1]; + array[0] = PyList_New(0); + if (array[0] == NULL) { + return NULL; + } + + PyObject **p = array; + Py_CLEAR(*p++); + assert(array[0] == NULL); + assert(p == array + 1); + + Py_RETURN_NONE; +} + + +// Test Py_SETREF() and Py_XSETREF() macros, similar to test_py_clear() +static PyObject* +test_py_setref(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + // Py_SETREF() simple case with a variable + PyObject *obj = PyList_New(0); + if (obj == NULL) { + return NULL; + } + Py_SETREF(obj, NULL); + assert(obj == NULL); + + // Py_XSETREF() simple case with a variable + PyObject *obj2 = PyList_New(0); + if (obj2 == NULL) { + return NULL; + } + Py_XSETREF(obj2, NULL); + assert(obj2 == NULL); + // test Py_XSETREF() when the argument is NULL + Py_XSETREF(obj2, NULL); + assert(obj2 == NULL); + + // gh-98724: complex case, Py_SETREF() argument has a side effect + PyObject* array[1]; + array[0] = PyList_New(0); + if (array[0] == NULL) { + return NULL; + } + + PyObject **p = array; + Py_SETREF(*p++, NULL); + assert(array[0] == NULL); + assert(p == array + 1); + + // gh-98724: complex case, Py_XSETREF() argument has a side effect + PyObject* array2[1]; + array2[0] = PyList_New(0); + if (array2[0] == NULL) { + return NULL; + } + + PyObject **p2 = array2; + Py_XSETREF(*p2++, NULL); + assert(array2[0] == NULL); + assert(p2 == array2 + 1); + + // test Py_XSETREF() when the argument is NULL + p2 = array2; + Py_XSETREF(*p2++, NULL); + assert(array2[0] == NULL); + assert(p2 == array2 + 1); + + Py_RETURN_NONE; +} + + 
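/* Editorial aside (illustrative sketch, not part of this patch): the
 * gh-98724 cases above only pass if Py_CLEAR(), Py_SETREF() and
 * Py_XSETREF() evaluate their first argument exactly once.  A conforming
 * macro therefore takes the address of the lvalue up front, roughly:
 *
 *     #define SKETCH_SETREF(dst, src)          \
 *         do {                                 \
 *             PyObject **_dst_ptr = &(dst);    \
 *             PyObject *_old = *_dst_ptr;      \
 *             *_dst_ptr = (src);               \
 *             Py_DECREF(_old);                 \
 *         } while (0)
 *
 * With that shape, SKETCH_SETREF(*p++, NULL) increments p a single time,
 * which is exactly what test_py_setref() asserts above; the real macros
 * follow the same single-evaluation pattern (an X variant would use
 * Py_XDECREF instead). */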
#define TEST_REFCOUNT() \ do { \ PyObject *obj = PyList_New(0); \ @@ -5205,317 +2775,85 @@ test_fatal_error(PyObject *self, PyObject *args) char *message; int release_gil = 0; if (!PyArg_ParseTuple(args, "y|i:fatal_error", &message, &release_gil)) - return NULL; - if (release_gil) { - Py_BEGIN_ALLOW_THREADS - Py_FatalError(message); - Py_END_ALLOW_THREADS - } - else { - Py_FatalError(message); - } - // Py_FatalError() does not return, but exits the process. - Py_RETURN_NONE; -} - -// type->tp_version_tag -static PyObject * -type_get_version(PyObject *self, PyObject *type) -{ - if (!PyType_Check(type)) { - PyErr_SetString(PyExc_TypeError, "argument must be a type"); - return NULL; - } - PyObject *res = PyLong_FromUnsignedLong( - ((PyTypeObject *)type)->tp_version_tag); - if (res == NULL) { - assert(PyErr_Occurred()); - return NULL; - } - return res; -} - - -// Test PyThreadState C API -static PyObject * -test_tstate_capi(PyObject *self, PyObject *Py_UNUSED(args)) -{ - // PyThreadState_Get() - PyThreadState *tstate = PyThreadState_Get(); - assert(tstate != NULL); - - // PyThreadState_GET() - PyThreadState *tstate2 = PyThreadState_Get(); - assert(tstate2 == tstate); - - // private _PyThreadState_UncheckedGet() - PyThreadState *tstate3 = _PyThreadState_UncheckedGet(); - assert(tstate3 == tstate); - - // PyThreadState_EnterTracing(), PyThreadState_LeaveTracing() - PyThreadState_EnterTracing(tstate); - PyThreadState_LeaveTracing(tstate); - - // PyThreadState_GetDict(): no tstate argument - PyObject *dict = PyThreadState_GetDict(); - // PyThreadState_GetDict() API can return NULL if PyDict_New() fails, - // but it should not occur in practice. - assert(dict != NULL); - assert(PyDict_Check(dict)); - // dict is a borrowed reference - - // private _PyThreadState_GetDict() - PyObject *dict2 = _PyThreadState_GetDict(tstate); - assert(dict2 == dict); - // dict2 is a borrowed reference - - // PyThreadState_GetInterpreter() - PyInterpreterState *interp = PyThreadState_GetInterpreter(tstate); - assert(interp != NULL); - - // PyThreadState_GetFrame() - PyFrameObject*frame = PyThreadState_GetFrame(tstate); - assert(frame != NULL); - assert(PyFrame_Check(frame)); - Py_DECREF(frame); - - // PyThreadState_GetID() - uint64_t id = PyThreadState_GetID(tstate); - assert(id >= 1); - - Py_RETURN_NONE; -} - - -// Test dict watching -static PyObject *g_dict_watch_events; -static int g_dict_watchers_installed; - -static int -dict_watch_callback(PyDict_WatchEvent event, - PyObject *dict, - PyObject *key, - PyObject *new_value) -{ - PyObject *msg; - switch(event) { - case PyDict_EVENT_CLEARED: - msg = PyUnicode_FromString("clear"); - break; - case PyDict_EVENT_DEALLOCATED: - msg = PyUnicode_FromString("dealloc"); - break; - case PyDict_EVENT_CLONED: - msg = PyUnicode_FromString("clone"); - break; - case PyDict_EVENT_ADDED: - msg = PyUnicode_FromFormat("new:%S:%S", key, new_value); - break; - case PyDict_EVENT_MODIFIED: - msg = PyUnicode_FromFormat("mod:%S:%S", key, new_value); - break; - case PyDict_EVENT_DELETED: - msg = PyUnicode_FromFormat("del:%S", key); - break; - default: - msg = PyUnicode_FromString("unknown"); - } - if (!msg) { - return -1; - } - assert(PyList_Check(g_dict_watch_events)); - if (PyList_Append(g_dict_watch_events, msg) < 0) { - Py_DECREF(msg); - return -1; - } - Py_DECREF(msg); - return 0; -} - -static int -dict_watch_callback_second(PyDict_WatchEvent event, - PyObject *dict, - PyObject *key, - PyObject *new_value) -{ - PyObject *msg = PyUnicode_FromString("second"); - if (!msg) { - return -1; - } 
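/* Editorial aside (illustrative sketch, not part of this patch): the
 * dict-watcher helpers being removed in this hunk exercise the public
 * watcher API in its usual order; a minimal client looks roughly like:
 *
 *     int wid = PyDict_AddWatcher(dict_watch_callback);   // register
 *     if (wid < 0) { ... handle error ... }
 *     if (PyDict_Watch(wid, my_dict) < 0) { ... }         // watch a dict
 *     ... mutate my_dict; the callback fires once per event ...
 *     (void)PyDict_Unwatch(wid, my_dict);
 *     (void)PyDict_ClearWatcher(wid);
 *
 * A callback returns 0 on success, or sets an exception and returns -1 on
 * failure, as the removed dict_watch_callback_error() demonstrates. */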
- if (PyList_Append(g_dict_watch_events, msg) < 0) { - Py_DECREF(msg); - return -1; - } - Py_DECREF(msg); - return 0; -} - -static int -dict_watch_callback_error(PyDict_WatchEvent event, - PyObject *dict, - PyObject *key, - PyObject *new_value) -{ - PyErr_SetString(PyExc_RuntimeError, "boom!"); - return -1; -} - -static PyObject * -add_dict_watcher(PyObject *self, PyObject *kind) -{ - int watcher_id; - assert(PyLong_Check(kind)); - long kind_l = PyLong_AsLong(kind); - if (kind_l == 2) { - watcher_id = PyDict_AddWatcher(dict_watch_callback_second); - } else if (kind_l == 1) { - watcher_id = PyDict_AddWatcher(dict_watch_callback_error); - } else { - watcher_id = PyDict_AddWatcher(dict_watch_callback); - } - if (watcher_id < 0) { - return NULL; - } - if (!g_dict_watchers_installed) { - assert(!g_dict_watch_events); - if (!(g_dict_watch_events = PyList_New(0))) { - return NULL; - } - } - g_dict_watchers_installed++; - return PyLong_FromLong(watcher_id); -} - -static PyObject * -clear_dict_watcher(PyObject *self, PyObject *watcher_id) -{ - if (PyDict_ClearWatcher(PyLong_AsLong(watcher_id))) { - return NULL; - } - g_dict_watchers_installed--; - if (!g_dict_watchers_installed) { - assert(g_dict_watch_events); - Py_CLEAR(g_dict_watch_events); - } - Py_RETURN_NONE; -} - -static PyObject * -watch_dict(PyObject *self, PyObject *args) -{ - PyObject *dict; - int watcher_id; - if (!PyArg_ParseTuple(args, "iO", &watcher_id, &dict)) { - return NULL; - } - if (PyDict_Watch(watcher_id, dict)) { - return NULL; - } - Py_RETURN_NONE; -} - -static PyObject * -unwatch_dict(PyObject *self, PyObject *args) -{ - PyObject *dict; - int watcher_id; - if (!PyArg_ParseTuple(args, "iO", &watcher_id, &dict)) { - return NULL; - } - if (PyDict_Unwatch(watcher_id, dict)) { - return NULL; - } - Py_RETURN_NONE; -} - -static PyObject * -get_dict_watcher_events(PyObject *self, PyObject *Py_UNUSED(args)) -{ - if (!g_dict_watch_events) { - PyErr_SetString(PyExc_RuntimeError, "no watchers active"); - return NULL; - } - Py_INCREF(g_dict_watch_events); - return g_dict_watch_events; -} - - -// Test PyFloat_Pack2(), PyFloat_Pack4() and PyFloat_Pack8() -static PyObject * -test_float_pack(PyObject *self, PyObject *args) -{ - int size; - double d; - int le; - if (!PyArg_ParseTuple(args, "idi", &size, &d, &le)) { - return NULL; - } - switch (size) - { - case 2: - { - char data[2]; - if (PyFloat_Pack2(d, data, le) < 0) { - return NULL; - } - return PyBytes_FromStringAndSize(data, Py_ARRAY_LENGTH(data)); - } - case 4: - { - char data[4]; - if (PyFloat_Pack4(d, data, le) < 0) { - return NULL; - } - return PyBytes_FromStringAndSize(data, Py_ARRAY_LENGTH(data)); - } - case 8: - { - char data[8]; - if (PyFloat_Pack8(d, data, le) < 0) { - return NULL; - } - return PyBytes_FromStringAndSize(data, Py_ARRAY_LENGTH(data)); + return NULL; + if (release_gil) { + Py_BEGIN_ALLOW_THREADS + Py_FatalError(message); + Py_END_ALLOW_THREADS } - default: break; + else { + Py_FatalError(message); } - - PyErr_SetString(PyExc_ValueError, "size must 2, 4 or 8"); - return NULL; + // Py_FatalError() does not return, but exits the process. 
+ Py_RETURN_NONE; } - -// Test PyFloat_Unpack2(), PyFloat_Unpack4() and PyFloat_Unpack8() +// type->tp_version_tag static PyObject * -test_float_unpack(PyObject *self, PyObject *args) +type_get_version(PyObject *self, PyObject *type) { - assert(!PyErr_Occurred()); - const char *data; - Py_ssize_t size; - int le; - if (!PyArg_ParseTuple(args, "y#i", &data, &size, &le)) { + if (!PyType_Check(type)) { + PyErr_SetString(PyExc_TypeError, "argument must be a type"); return NULL; } - double d; - switch (size) - { - case 2: - d = PyFloat_Unpack2(data, le); - break; - case 4: - d = PyFloat_Unpack4(data, le); - break; - case 8: - d = PyFloat_Unpack8(data, le); - break; - default: - PyErr_SetString(PyExc_ValueError, "data length must 2, 4 or 8 bytes"); + PyObject *res = PyLong_FromUnsignedLong( + ((PyTypeObject *)type)->tp_version_tag); + if (res == NULL) { + assert(PyErr_Occurred()); return NULL; } + return res; +} - if (d == -1.0 && PyErr_Occurred()) { - return NULL; - } - return PyFloat_FromDouble(d); + +// Test PyThreadState C API +static PyObject * +test_tstate_capi(PyObject *self, PyObject *Py_UNUSED(args)) +{ + // PyThreadState_Get() + PyThreadState *tstate = PyThreadState_Get(); + assert(tstate != NULL); + + // PyThreadState_GET() + PyThreadState *tstate2 = PyThreadState_Get(); + assert(tstate2 == tstate); + + // private _PyThreadState_UncheckedGet() + PyThreadState *tstate3 = _PyThreadState_UncheckedGet(); + assert(tstate3 == tstate); + + // PyThreadState_EnterTracing(), PyThreadState_LeaveTracing() + PyThreadState_EnterTracing(tstate); + PyThreadState_LeaveTracing(tstate); + + // PyThreadState_GetDict(): no tstate argument + PyObject *dict = PyThreadState_GetDict(); + // PyThreadState_GetDict() API can return NULL if PyDict_New() fails, + // but it should not occur in practice. 
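    // Editorial aside (hedged sketch, not part of this patch): the value is
    // a *borrowed* reference, so non-test code that wants to keep it beyond
    // the current call would typically do something like
    //
    //     PyObject *d = PyThreadState_GetDict();
    //     if (d == NULL) {
    //         PyErr_NoMemory();   // assumption: treat it as an allocation failure
    //         return NULL;
    //     }
    //     Py_INCREF(d);           // take a strong reference before storing it
    //
    // The bare asserts below are acceptable only because this is test code.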
+ assert(dict != NULL); + assert(PyDict_Check(dict)); + // dict is a borrowed reference + + // private _PyThreadState_GetDict() + PyObject *dict2 = _PyThreadState_GetDict(tstate); + assert(dict2 == dict); + // dict2 is a borrowed reference + + // PyThreadState_GetInterpreter() + PyInterpreterState *interp = PyThreadState_GetInterpreter(tstate); + assert(interp != NULL); + + // PyThreadState_GetFrame() + PyFrameObject*frame = PyThreadState_GetFrame(tstate); + assert(frame != NULL); + assert(PyFrame_Check(frame)); + Py_DECREF(frame); + + // PyThreadState_GetID() + uint64_t id = PyThreadState_GetID(tstate); + assert(id >= 1); + + Py_RETURN_NONE; } static PyObject * @@ -5573,12 +2911,50 @@ frame_getlasti(PyObject *self, PyObject *frame) return PyLong_FromLong(lasti); } +static PyObject * +test_frame_getvar(PyObject *self, PyObject *args) +{ + PyObject *frame, *name; + if (!PyArg_ParseTuple(args, "OO", &frame, &name)) { + return NULL; + } + if (!PyFrame_Check(frame)) { + PyErr_SetString(PyExc_TypeError, "argument must be a frame"); + return NULL; + } + + return PyFrame_GetVar((PyFrameObject *)frame, name); +} + +static PyObject * +test_frame_getvarstring(PyObject *self, PyObject *args) +{ + PyObject *frame; + const char *name; + if (!PyArg_ParseTuple(args, "Oy", &frame, &name)) { + return NULL; + } + if (!PyFrame_Check(frame)) { + PyErr_SetString(PyExc_TypeError, "argument must be a frame"); + return NULL; + } + + return PyFrame_GetVarString((PyFrameObject *)frame, name); +} + + static PyObject * eval_get_func_name(PyObject *self, PyObject *func) { return PyUnicode_FromString(PyEval_GetFuncName(func)); } +static PyObject * +eval_get_func_desc(PyObject *self, PyObject *func) +{ + return PyUnicode_FromString(PyEval_GetFuncDesc(func)); +} + static PyObject * get_feature_macros(PyObject *self, PyObject *Py_UNUSED(args)) { @@ -5767,8 +3143,7 @@ function_get_code(PyObject *self, PyObject *func) { PyObject *code = PyFunction_GetCode(func); if (code != NULL) { - Py_INCREF(code); - return code; + return Py_NewRef(code); } else { return NULL; } @@ -5779,8 +3154,7 @@ function_get_globals(PyObject *self, PyObject *func) { PyObject *globals = PyFunction_GetGlobals(func); if (globals != NULL) { - Py_INCREF(globals); - return globals; + return Py_NewRef(globals); } else { return NULL; } @@ -5791,8 +3165,7 @@ function_get_module(PyObject *self, PyObject *func) { PyObject *module = PyFunction_GetModule(func); if (module != NULL) { - Py_INCREF(module); - return module; + return Py_NewRef(module); } else { return NULL; } @@ -5803,8 +3176,7 @@ function_get_defaults(PyObject *self, PyObject *func) { PyObject *defaults = PyFunction_GetDefaults(func); if (defaults != NULL) { - Py_INCREF(defaults); - return defaults; + return Py_NewRef(defaults); } else if (PyErr_Occurred()) { return NULL; } else { @@ -5825,132 +3197,33 @@ function_set_defaults(PyObject *self, PyObject *args) Py_RETURN_NONE; } - -// type watchers - -static PyObject *g_type_modified_events; -static int g_type_watchers_installed; - -static int -type_modified_callback(PyTypeObject *type) -{ - assert(PyList_Check(g_type_modified_events)); - if(PyList_Append(g_type_modified_events, (PyObject *)type) < 0) { - return -1; - } - return 0; -} - -static int -type_modified_callback_wrap(PyTypeObject *type) -{ - assert(PyList_Check(g_type_modified_events)); - PyObject *list = PyList_New(0); - if (!list) { - return -1; - } - if (PyList_Append(list, (PyObject *)type) < 0) { - Py_DECREF(list); - return -1; - } - if (PyList_Append(g_type_modified_events, list) < 0) { 
- Py_DECREF(list); - return -1; - } - Py_DECREF(list); - return 0; -} - -static int -type_modified_callback_error(PyTypeObject *type) -{ - PyErr_SetString(PyExc_RuntimeError, "boom!"); - return -1; -} - -static PyObject * -add_type_watcher(PyObject *self, PyObject *kind) -{ - int watcher_id; - assert(PyLong_Check(kind)); - long kind_l = PyLong_AsLong(kind); - if (kind_l == 2) { - watcher_id = PyType_AddWatcher(type_modified_callback_wrap); - } else if (kind_l == 1) { - watcher_id = PyType_AddWatcher(type_modified_callback_error); - } else { - watcher_id = PyType_AddWatcher(type_modified_callback); - } - if (watcher_id < 0) { - return NULL; - } - if (!g_type_watchers_installed) { - assert(!g_type_modified_events); - if (!(g_type_modified_events = PyList_New(0))) { - return NULL; - } - } - g_type_watchers_installed++; - return PyLong_FromLong(watcher_id); -} - -static PyObject * -clear_type_watcher(PyObject *self, PyObject *watcher_id) -{ - if (PyType_ClearWatcher(PyLong_AsLong(watcher_id))) { - return NULL; - } - g_type_watchers_installed--; - if (!g_type_watchers_installed) { - assert(g_type_modified_events); - Py_CLEAR(g_type_modified_events); - } - Py_RETURN_NONE; -} - -static PyObject * -get_type_modified_events(PyObject *self, PyObject *Py_UNUSED(args)) -{ - if (!g_type_modified_events) { - PyErr_SetString(PyExc_RuntimeError, "no watchers active"); - return NULL; - } - Py_INCREF(g_type_modified_events); - return g_type_modified_events; -} - static PyObject * -watch_type(PyObject *self, PyObject *args) +function_get_kw_defaults(PyObject *self, PyObject *func) { - PyObject *type; - int watcher_id; - if (!PyArg_ParseTuple(args, "iO", &watcher_id, &type)) { - return NULL; - } - if (PyType_Watch(watcher_id, type)) { + PyObject *defaults = PyFunction_GetKwDefaults(func); + if (defaults != NULL) { + return Py_NewRef(defaults); + } else if (PyErr_Occurred()) { return NULL; + } else { + Py_RETURN_NONE; // This can happen when `kwdefaults` are set to `None` } - Py_RETURN_NONE; } static PyObject * -unwatch_type(PyObject *self, PyObject *args) +function_set_kw_defaults(PyObject *self, PyObject *args) { - PyObject *type; - int watcher_id; - if (!PyArg_ParseTuple(args, "iO", &watcher_id, &type)) { + PyObject *func = NULL, *defaults = NULL; + if (!PyArg_ParseTuple(args, "OO", &func, &defaults)) { return NULL; } - if (PyType_Unwatch(watcher_id, type)) { + int result = PyFunction_SetKwDefaults(func, defaults); + if (result == -1) return NULL; - } Py_RETURN_NONE; } - static PyObject *test_buildvalue_issue38913(PyObject *, PyObject *); -static PyObject *getargs_s_hash_int(PyObject *, PyObject *, PyObject*); -static PyObject *getargs_s_hash_int2(PyObject *, PyObject *, PyObject*); static PyMethodDef TestMethods[] = { {"raise_exception", raise_exception, METH_VARARGS}, @@ -5958,33 +3231,11 @@ static PyMethodDef TestMethods[] = { {"set_errno", set_errno, METH_VARARGS}, {"test_config", test_config, METH_NOARGS}, {"test_sizeof_c_types", test_sizeof_c_types, METH_NOARGS}, - {"test_datetime_capi", test_datetime_capi, METH_NOARGS}, - {"datetime_check_date", datetime_check_date, METH_VARARGS}, - {"datetime_check_time", datetime_check_time, METH_VARARGS}, - {"datetime_check_datetime", datetime_check_datetime, METH_VARARGS}, - {"datetime_check_delta", datetime_check_delta, METH_VARARGS}, - {"datetime_check_tzinfo", datetime_check_tzinfo, METH_VARARGS}, - {"make_timezones_capi", make_timezones_capi, METH_NOARGS}, - {"get_timezones_offset_zero", get_timezones_offset_zero, METH_NOARGS}, - {"get_timezone_utc_capi", 
get_timezone_utc_capi, METH_VARARGS}, - {"get_date_fromdate", get_date_fromdate, METH_VARARGS}, - {"get_datetime_fromdateandtime", get_datetime_fromdateandtime, METH_VARARGS}, - {"get_datetime_fromdateandtimeandfold", get_datetime_fromdateandtimeandfold, METH_VARARGS}, - {"get_time_fromtime", get_time_fromtime, METH_VARARGS}, - {"get_time_fromtimeandfold", get_time_fromtimeandfold, METH_VARARGS}, - {"get_delta_fromdsu", get_delta_fromdsu, METH_VARARGS}, - {"get_date_fromtimestamp", get_date_fromtimestamp, METH_VARARGS}, - {"get_datetime_fromtimestamp", get_datetime_fromtimestamp, METH_VARARGS}, - {"PyDateTime_GET", test_PyDateTime_GET, METH_O}, - {"PyDateTime_DATE_GET", test_PyDateTime_DATE_GET, METH_O}, - {"PyDateTime_TIME_GET", test_PyDateTime_TIME_GET, METH_O}, - {"PyDateTime_DELTA_GET", test_PyDateTime_DELTA_GET, METH_O}, {"test_gc_control", test_gc_control, METH_NOARGS}, {"test_list_api", test_list_api, METH_NOARGS}, {"test_dict_iteration", test_dict_iteration, METH_NOARGS}, {"dict_getitem_knownhash", dict_getitem_knownhash, METH_VARARGS}, {"test_lazy_hash_inheritance", test_lazy_hash_inheritance,METH_NOARGS}, - {"test_long_api", test_long_api, METH_NOARGS}, {"test_xincref_doesnt_leak",test_xincref_doesnt_leak, METH_NOARGS}, {"test_incref_doesnt_leak", test_incref_doesnt_leak, METH_NOARGS}, {"test_xdecref_doesnt_leak",test_xdecref_doesnt_leak, METH_NOARGS}, @@ -5994,20 +3245,9 @@ static PyMethodDef TestMethods[] = { {"test_structseq_newtype_null_descr_doc", test_structseq_newtype_null_descr_doc, METH_NOARGS}, {"test_incref_decref_API", test_incref_decref_API, METH_NOARGS}, - {"test_long_and_overflow", test_long_and_overflow, METH_NOARGS}, - {"test_long_as_double", test_long_as_double, METH_NOARGS}, - {"test_long_as_size_t", test_long_as_size_t, METH_NOARGS}, - {"test_long_as_unsigned_long_long_mask", - test_long_as_unsigned_long_long_mask, METH_NOARGS}, - {"test_long_numbits", test_long_numbits, METH_NOARGS}, - {"test_k_code", test_k_code, METH_NOARGS}, - {"test_empty_argparse", test_empty_argparse, METH_NOARGS}, - {"parse_tuple_and_keywords", parse_tuple_and_keywords, METH_VARARGS}, {"pyobject_repr_from_null", pyobject_repr_from_null, METH_NOARGS}, {"pyobject_str_from_null", pyobject_str_from_null, METH_NOARGS}, {"pyobject_bytes_from_null", pyobject_bytes_from_null, METH_NOARGS}, - {"test_with_docstring", test_with_docstring, METH_NOARGS, - PyDoc_STR("This is a pretty normal docstring.")}, {"test_string_to_double", test_string_to_double, METH_NOARGS}, {"test_capsule", (PyCFunction)test_capsule, METH_NOARGS}, {"test_from_contiguous", (PyCFunction)test_from_contiguous, METH_NOARGS}, @@ -6018,66 +3258,9 @@ static PyMethodDef TestMethods[] = { {"PyBuffer_SizeFromFormat", test_PyBuffer_SizeFromFormat, METH_VARARGS}, {"test_buildvalue_N", test_buildvalue_N, METH_NOARGS}, {"test_buildvalue_issue38913", test_buildvalue_issue38913, METH_NOARGS}, - {"get_args", get_args, METH_VARARGS}, {"test_get_statictype_slots", test_get_statictype_slots, METH_NOARGS}, {"test_get_type_name", test_get_type_name, METH_NOARGS}, {"test_get_type_qualname", test_get_type_qualname, METH_NOARGS}, - {"get_kwargs", _PyCFunction_CAST(get_kwargs), - METH_VARARGS|METH_KEYWORDS}, - {"getargs_tuple", getargs_tuple, METH_VARARGS}, - {"getargs_keywords", _PyCFunction_CAST(getargs_keywords), - METH_VARARGS|METH_KEYWORDS}, - {"getargs_keyword_only", _PyCFunction_CAST(getargs_keyword_only), - METH_VARARGS|METH_KEYWORDS}, - {"getargs_positional_only_and_keywords", - _PyCFunction_CAST(getargs_positional_only_and_keywords), - 
METH_VARARGS|METH_KEYWORDS}, - {"getargs_b", getargs_b, METH_VARARGS}, - {"getargs_B", getargs_B, METH_VARARGS}, - {"getargs_h", getargs_h, METH_VARARGS}, - {"getargs_H", getargs_H, METH_VARARGS}, - {"getargs_I", getargs_I, METH_VARARGS}, - {"getargs_k", getargs_k, METH_VARARGS}, - {"getargs_i", getargs_i, METH_VARARGS}, - {"getargs_l", getargs_l, METH_VARARGS}, - {"getargs_n", getargs_n, METH_VARARGS}, - {"getargs_p", getargs_p, METH_VARARGS}, - {"getargs_L", getargs_L, METH_VARARGS}, - {"getargs_K", getargs_K, METH_VARARGS}, - {"test_longlong_api", test_longlong_api, METH_NOARGS}, - {"test_long_long_and_overflow",test_long_long_and_overflow, METH_NOARGS}, - {"test_L_code", test_L_code, METH_NOARGS}, - {"getargs_f", getargs_f, METH_VARARGS}, - {"getargs_d", getargs_d, METH_VARARGS}, - {"getargs_D", getargs_D, METH_VARARGS}, - {"getargs_S", getargs_S, METH_VARARGS}, - {"getargs_Y", getargs_Y, METH_VARARGS}, - {"getargs_U", getargs_U, METH_VARARGS}, - {"getargs_c", getargs_c, METH_VARARGS}, - {"getargs_C", getargs_C, METH_VARARGS}, - {"getargs_s", getargs_s, METH_VARARGS}, - {"getargs_s_star", getargs_s_star, METH_VARARGS}, - {"getargs_s_hash", getargs_s_hash, METH_VARARGS}, - {"getargs_s_hash_int", _PyCFunction_CAST(getargs_s_hash_int), - METH_VARARGS|METH_KEYWORDS}, - {"getargs_s_hash_int2", _PyCFunction_CAST(getargs_s_hash_int2), - METH_VARARGS|METH_KEYWORDS}, - {"getargs_z", getargs_z, METH_VARARGS}, - {"getargs_z_star", getargs_z_star, METH_VARARGS}, - {"getargs_z_hash", getargs_z_hash, METH_VARARGS}, - {"getargs_y", getargs_y, METH_VARARGS}, - {"getargs_y_star", getargs_y_star, METH_VARARGS}, - {"getargs_y_hash", getargs_y_hash, METH_VARARGS}, - {"getargs_u", getargs_u, METH_VARARGS}, - {"getargs_u_hash", getargs_u_hash, METH_VARARGS}, - {"getargs_Z", getargs_Z, METH_VARARGS}, - {"getargs_Z_hash", getargs_Z_hash, METH_VARARGS}, - {"getargs_w_star", getargs_w_star, METH_VARARGS}, - {"getargs_es", getargs_es, METH_VARARGS}, - {"getargs_et", getargs_et, METH_VARARGS}, - {"getargs_es_hash", getargs_es_hash, METH_VARARGS}, - {"getargs_et_hash", getargs_et_hash, METH_VARARGS}, - {"test_s_code", test_s_code, METH_NOARGS}, {"_test_thread_state", test_thread_state, METH_VARARGS}, {"_pending_threadfunc", pending_threadfunc, METH_VARARGS}, #ifdef HAVE_GETTIMEOFDAY @@ -6098,46 +3281,8 @@ static PyMethodDef TestMethods[] = { {"run_in_subinterp_with_config", _PyCFunction_CAST(run_in_subinterp_with_config), METH_VARARGS | METH_KEYWORDS}, - {"pytime_object_to_time_t", test_pytime_object_to_time_t, METH_VARARGS}, - {"pytime_object_to_timeval", test_pytime_object_to_timeval, METH_VARARGS}, - {"pytime_object_to_timespec", test_pytime_object_to_timespec, METH_VARARGS}, {"with_tp_del", with_tp_del, METH_VARARGS}, {"create_cfunction", create_cfunction, METH_NOARGS}, - {"test_pymem_alloc0", test_pymem_alloc0, METH_NOARGS}, - {"test_pyobject_new", test_pyobject_new, METH_NOARGS}, - {"test_pymem_setrawallocators",test_pymem_setrawallocators, METH_NOARGS}, - {"test_pymem_setallocators",test_pymem_setallocators, METH_NOARGS}, - {"test_pyobject_setallocators",test_pyobject_setallocators, METH_NOARGS}, - {"set_nomemory", (PyCFunction)set_nomemory, METH_VARARGS, - PyDoc_STR("set_nomemory(start:int, stop:int = 0)")}, - {"remove_mem_hooks", remove_mem_hooks, METH_NOARGS, - PyDoc_STR("Remove memory hooks.")}, - {"no_docstring", - (PyCFunction)test_with_docstring, METH_NOARGS}, - {"docstring_empty", - (PyCFunction)test_with_docstring, METH_NOARGS, - docstring_empty}, - {"docstring_no_signature", - 
(PyCFunction)test_with_docstring, METH_NOARGS, - docstring_no_signature}, - {"docstring_with_invalid_signature", - (PyCFunction)test_with_docstring, METH_NOARGS, - docstring_with_invalid_signature}, - {"docstring_with_invalid_signature2", - (PyCFunction)test_with_docstring, METH_NOARGS, - docstring_with_invalid_signature2}, - {"docstring_with_signature", - (PyCFunction)test_with_docstring, METH_NOARGS, - docstring_with_signature}, - {"docstring_with_signature_but_no_doc", - (PyCFunction)test_with_docstring, METH_NOARGS, - docstring_with_signature_but_no_doc}, - {"docstring_with_signature_and_extra_newlines", - (PyCFunction)test_with_docstring, METH_NOARGS, - docstring_with_signature_and_extra_newlines}, - {"docstring_with_signature_with_defaults", - (PyCFunction)test_with_docstring, METH_NOARGS, - docstring_with_signature_with_defaults}, {"call_in_temporary_c_thread", call_in_temporary_c_thread, METH_O, PyDoc_STR("set_error_class(error_class) -> None")}, {"pymarshal_write_long_to_file", @@ -6156,26 +3301,6 @@ static PyMethodDef TestMethods[] = { {"return_result_with_error", return_result_with_error, METH_NOARGS}, {"getitem_with_error", getitem_with_error, METH_VARARGS}, {"Py_CompileString", pycompilestring, METH_O}, - {"PyTime_FromSeconds", test_pytime_fromseconds, METH_VARARGS}, - {"PyTime_FromSecondsObject", test_pytime_fromsecondsobject, METH_VARARGS}, - {"PyTime_AsSecondsDouble", test_pytime_assecondsdouble, METH_VARARGS}, - {"PyTime_AsTimeval", test_PyTime_AsTimeval, METH_VARARGS}, - {"PyTime_AsTimeval_clamp", test_PyTime_AsTimeval_clamp, METH_VARARGS}, -#ifdef HAVE_CLOCK_GETTIME - {"PyTime_AsTimespec", test_PyTime_AsTimespec, METH_VARARGS}, - {"PyTime_AsTimespec_clamp", test_PyTime_AsTimespec_clamp, METH_VARARGS}, -#endif - {"PyTime_AsMilliseconds", test_PyTime_AsMilliseconds, METH_VARARGS}, - {"PyTime_AsMicroseconds", test_PyTime_AsMicroseconds, METH_VARARGS}, - {"pymem_buffer_overflow", pymem_buffer_overflow, METH_NOARGS}, - {"pymem_api_misuse", pymem_api_misuse, METH_NOARGS}, - {"pymem_malloc_without_gil", pymem_malloc_without_gil, METH_NOARGS}, - {"pymem_getallocatorsname", test_pymem_getallocatorsname, METH_NOARGS}, - {"check_pyobject_null_is_freed", check_pyobject_null_is_freed, METH_NOARGS}, - {"check_pyobject_uninitialized_is_freed", check_pyobject_uninitialized_is_freed, METH_NOARGS}, - {"check_pyobject_forbidden_bytes_is_freed", check_pyobject_forbidden_bytes_is_freed, METH_NOARGS}, - {"check_pyobject_freed_is_freed", check_pyobject_freed_is_freed, METH_NOARGS}, - {"pyobject_malloc_without_gil", pyobject_malloc_without_gil, METH_NOARGS}, {"tracemalloc_track", tracemalloc_track, METH_VARARGS}, {"tracemalloc_untrack", tracemalloc_untrack, METH_VARARGS}, {"tracemalloc_get_traceback", tracemalloc_get_traceback, METH_VARARGS}, @@ -6190,6 +3315,8 @@ static PyMethodDef TestMethods[] = { {"get_mapping_items", get_mapping_items, METH_O}, {"test_mapping_has_key_string", test_mapping_has_key_string, METH_NOARGS}, {"mapping_has_key", mapping_has_key, METH_VARARGS}, + {"sequence_set_slice", sequence_set_slice, METH_VARARGS}, + {"sequence_del_slice", sequence_del_slice, METH_VARARGS}, {"test_pythread_tss_key_state", test_pythread_tss_key_state, METH_VARARGS}, {"hamt", new_hamt, METH_NOARGS}, {"bad_get", _PyCFunction_CAST(bad_get), METH_FASTCALL}, @@ -6210,6 +3337,8 @@ static PyMethodDef TestMethods[] = { {"pynumber_tobase", pynumber_tobase, METH_VARARGS}, {"without_gc", without_gc, METH_O}, {"test_set_type_size", test_set_type_size, METH_NOARGS}, + {"test_py_clear", test_py_clear, 
METH_NOARGS}, + {"test_py_setref", test_py_setref, METH_NOARGS}, {"test_refcount_macros", test_refcount_macros, METH_NOARGS}, {"test_refcount_funcs", test_refcount_funcs, METH_NOARGS}, {"test_py_is_macros", test_py_is_macros, METH_NOARGS}, @@ -6218,178 +3347,30 @@ static PyMethodDef TestMethods[] = { PyDoc_STR("fatal_error(message, release_gil=False): call Py_FatalError(message)")}, {"type_get_version", type_get_version, METH_O, PyDoc_STR("type->tp_version_tag")}, {"test_tstate_capi", test_tstate_capi, METH_NOARGS, NULL}, - {"float_pack", test_float_pack, METH_VARARGS, NULL}, - {"float_unpack", test_float_unpack, METH_VARARGS, NULL}, {"frame_getlocals", frame_getlocals, METH_O, NULL}, {"frame_getglobals", frame_getglobals, METH_O, NULL}, {"frame_getgenerator", frame_getgenerator, METH_O, NULL}, {"frame_getbuiltins", frame_getbuiltins, METH_O, NULL}, {"frame_getlasti", frame_getlasti, METH_O, NULL}, + {"frame_getvar", test_frame_getvar, METH_VARARGS, NULL}, + {"frame_getvarstring", test_frame_getvarstring, METH_VARARGS, NULL}, {"eval_get_func_name", eval_get_func_name, METH_O, NULL}, + {"eval_get_func_desc", eval_get_func_desc, METH_O, NULL}, {"get_feature_macros", get_feature_macros, METH_NOARGS, NULL}, {"test_code_api", test_code_api, METH_NOARGS, NULL}, {"settrace_to_record", settrace_to_record, METH_O, NULL}, {"test_macros", test_macros, METH_NOARGS, NULL}, {"clear_managed_dict", clear_managed_dict, METH_O, NULL}, - {"add_dict_watcher", add_dict_watcher, METH_O, NULL}, - {"clear_dict_watcher", clear_dict_watcher, METH_O, NULL}, - {"watch_dict", watch_dict, METH_VARARGS, NULL}, - {"unwatch_dict", unwatch_dict, METH_VARARGS, NULL}, - {"get_dict_watcher_events", get_dict_watcher_events, METH_NOARGS, NULL}, {"function_get_code", function_get_code, METH_O, NULL}, {"function_get_globals", function_get_globals, METH_O, NULL}, {"function_get_module", function_get_module, METH_O, NULL}, {"function_get_defaults", function_get_defaults, METH_O, NULL}, {"function_set_defaults", function_set_defaults, METH_VARARGS, NULL}, - {"add_type_watcher", add_type_watcher, METH_O, NULL}, - {"clear_type_watcher", clear_type_watcher, METH_O, NULL}, - {"watch_type", watch_type, METH_VARARGS, NULL}, - {"unwatch_type", unwatch_type, METH_VARARGS, NULL}, - {"get_type_modified_events", get_type_modified_events, METH_NOARGS, NULL}, + {"function_get_kw_defaults", function_get_kw_defaults, METH_O, NULL}, + {"function_set_kw_defaults", function_set_kw_defaults, METH_VARARGS, NULL}, {NULL, NULL} /* sentinel */ }; -typedef struct { - char bool_member; - char byte_member; - unsigned char ubyte_member; - short short_member; - unsigned short ushort_member; - int int_member; - unsigned int uint_member; - long long_member; - unsigned long ulong_member; - Py_ssize_t pyssizet_member; - float float_member; - double double_member; - char inplace_member[6]; - long long longlong_member; - unsigned long long ulonglong_member; -} all_structmembers; - -typedef struct { - PyObject_HEAD - all_structmembers structmembers; -} test_structmembers; - -static struct PyMemberDef test_members[] = { - {"T_BOOL", T_BOOL, offsetof(test_structmembers, structmembers.bool_member), 0, NULL}, - {"T_BYTE", T_BYTE, offsetof(test_structmembers, structmembers.byte_member), 0, NULL}, - {"T_UBYTE", T_UBYTE, offsetof(test_structmembers, structmembers.ubyte_member), 0, NULL}, - {"T_SHORT", T_SHORT, offsetof(test_structmembers, structmembers.short_member), 0, NULL}, - {"T_USHORT", T_USHORT, offsetof(test_structmembers, structmembers.ushort_member), 0, NULL}, - 
{"T_INT", T_INT, offsetof(test_structmembers, structmembers.int_member), 0, NULL}, - {"T_UINT", T_UINT, offsetof(test_structmembers, structmembers.uint_member), 0, NULL}, - {"T_LONG", T_LONG, offsetof(test_structmembers, structmembers.long_member), 0, NULL}, - {"T_ULONG", T_ULONG, offsetof(test_structmembers, structmembers.ulong_member), 0, NULL}, - {"T_PYSSIZET", T_PYSSIZET, offsetof(test_structmembers, structmembers.pyssizet_member), 0, NULL}, - {"T_FLOAT", T_FLOAT, offsetof(test_structmembers, structmembers.float_member), 0, NULL}, - {"T_DOUBLE", T_DOUBLE, offsetof(test_structmembers, structmembers.double_member), 0, NULL}, - {"T_STRING_INPLACE", T_STRING_INPLACE, offsetof(test_structmembers, structmembers.inplace_member), 0, NULL}, - {"T_LONGLONG", T_LONGLONG, offsetof(test_structmembers, structmembers.longlong_member), 0, NULL}, - {"T_ULONGLONG", T_ULONGLONG, offsetof(test_structmembers, structmembers.ulonglong_member), 0, NULL}, - {NULL} -}; - - -static PyObject * -test_structmembers_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) -{ - static char *keywords[] = { - "T_BOOL", "T_BYTE", "T_UBYTE", "T_SHORT", "T_USHORT", - "T_INT", "T_UINT", "T_LONG", "T_ULONG", "T_PYSSIZET", - "T_FLOAT", "T_DOUBLE", "T_STRING_INPLACE", - "T_LONGLONG", "T_ULONGLONG", - NULL}; - static const char fmt[] = "|bbBhHiIlknfds#LK"; - test_structmembers *ob; - const char *s = NULL; - Py_ssize_t string_len = 0; - ob = PyObject_New(test_structmembers, type); - if (ob == NULL) - return NULL; - memset(&ob->structmembers, 0, sizeof(all_structmembers)); - if (!PyArg_ParseTupleAndKeywords(args, kwargs, fmt, keywords, - &ob->structmembers.bool_member, - &ob->structmembers.byte_member, - &ob->structmembers.ubyte_member, - &ob->structmembers.short_member, - &ob->structmembers.ushort_member, - &ob->structmembers.int_member, - &ob->structmembers.uint_member, - &ob->structmembers.long_member, - &ob->structmembers.ulong_member, - &ob->structmembers.pyssizet_member, - &ob->structmembers.float_member, - &ob->structmembers.double_member, - &s, &string_len - , &ob->structmembers.longlong_member, - &ob->structmembers.ulonglong_member - )) { - Py_DECREF(ob); - return NULL; - } - if (s != NULL) { - if (string_len > 5) { - Py_DECREF(ob); - PyErr_SetString(PyExc_ValueError, "string too long"); - return NULL; - } - strcpy(ob->structmembers.inplace_member, s); - } - else { - strcpy(ob->structmembers.inplace_member, ""); - } - return (PyObject *)ob; -} - -static void -test_structmembers_free(PyObject *ob) -{ - PyObject_Free(ob); -} - -static PyTypeObject test_structmembersType = { - PyVarObject_HEAD_INIT(NULL, 0) - "test_structmembersType", - sizeof(test_structmembers), /* tp_basicsize */ - 0, /* tp_itemsize */ - test_structmembers_free, /* destructor tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - PyObject_GenericGetAttr, /* tp_getattro */ - PyObject_GenericSetAttr, /* tp_setattro */ - 0, /* tp_as_buffer */ - 0, /* tp_flags */ - "Type containing all structmember types", - 0, /* traverseproc tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - 0, /* tp_methods */ - test_members, /* tp_members */ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - test_structmembers_new, /* tp_new */ -}; - typedef struct { PyObject_HEAD @@ -6536,8 +3517,7 @@ 
awaitObject_new(PyTypeObject *type, PyObject *args, PyObject *kwds) return NULL; } - Py_INCREF(v); - ao->ao_iterator = v; + ao->ao_iterator = Py_NewRef(v); return (PyObject *)ao; } @@ -6554,8 +3534,7 @@ awaitObject_dealloc(awaitObject *ao) static PyObject * awaitObject_await(awaitObject *ao) { - Py_INCREF(ao->ao_iterator); - return ao->ao_iterator; + return Py_NewRef(ao->ao_iterator); } static PyAsyncMethods awaitType_as_async = { @@ -6776,8 +3755,7 @@ generic_alias_new(PyObject *item) if (o == NULL) { return NULL; } - Py_INCREF(item); - o->item = item; + o->item = Py_NewRef(item); return (PyObject*) o; } @@ -6945,11 +3923,6 @@ PyInit__testcapi(void) Py_SET_TYPE(&_HashInheritanceTester_Type, &PyType_Type); - Py_SET_TYPE(&test_structmembersType, &PyType_Type); - Py_INCREF(&test_structmembersType); - /* don't use a name starting with "test", since we don't want - test_capi to automatically call this */ - PyModule_AddObject(m, "_test_structmembersType", (PyObject *)&test_structmembersType); if (PyType_Ready(&matmulType) < 0) return NULL; Py_INCREF(&matmulType); @@ -7031,14 +4004,6 @@ PyInit__testcapi(void) PyModule_AddObject(m, "instancemethod", (PyObject *)&PyInstanceMethod_Type); PyModule_AddIntConstant(m, "the_number_three", 3); - PyObject *v; -#ifdef WITH_PYMALLOC - v = Py_True; -#else - v = Py_False; -#endif - Py_INCREF(v); - PyModule_AddObject(m, "WITH_PYMALLOC", v); TestError = PyErr_NewException("_testcapi.error", NULL, NULL); Py_INCREF(TestError); @@ -7062,6 +4027,33 @@ PyInit__testcapi(void) if (_PyTestCapi_Init_Unicode(m) < 0) { return NULL; } + if (_PyTestCapi_Init_GetArgs(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_PyTime(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_DateTime(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_Docstring(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_Mem(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_Watchers(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_Long(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_Float(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_Structmember(m) < 0) { + return NULL; + } #ifndef LIMITED_API_AVAILABLE PyModule_AddObjectRef(m, "LIMITED_API_AVAILABLE", Py_False); @@ -7119,35 +4111,3 @@ test_buildvalue_issue38913(PyObject *self, PyObject *Py_UNUSED(ignored)) Py_RETURN_NONE; } - -#undef PyArg_ParseTupleAndKeywords -PyAPI_FUNC(int) PyArg_ParseTupleAndKeywords(PyObject *, PyObject *, - const char *, char **, ...); - -static PyObject * -getargs_s_hash_int(PyObject *self, PyObject *args, PyObject *kwargs) -{ - static char *keywords[] = {"", "", "x", NULL}; - Py_buffer buf = {NULL}; - const char *s; - int len; - int i = 0; - if (!PyArg_ParseTupleAndKeywords(args, kwargs, "w*|s#i", keywords, &buf, &s, &len, &i)) - return NULL; - PyBuffer_Release(&buf); - Py_RETURN_NONE; -} - -static PyObject * -getargs_s_hash_int2(PyObject *self, PyObject *args, PyObject *kwargs) -{ - static char *keywords[] = {"", "", "x", NULL}; - Py_buffer buf = {NULL}; - const char *s; - int len; - int i = 0; - if (!PyArg_ParseTupleAndKeywords(args, kwargs, "w*|(s#)i", keywords, &buf, &s, &len, &i)) - return NULL; - PyBuffer_Release(&buf); - Py_RETURN_NONE; -} diff --git a/Modules/_testclinic.c b/Modules/_testclinic.c new file mode 100644 index 00000000000000..91fdee24d328d9 --- /dev/null +++ b/Modules/_testclinic.c @@ -0,0 +1,1187 @@ +#ifndef Py_BUILD_CORE_BUILTIN +# define Py_BUILD_CORE_MODULE 1 +#endif + +/* Always enable assertions */ +#undef NDEBUG + +#define PY_SSIZE_T_CLEAN + +#include "Python.h" + +#include 
"clinic/_testclinic.c.h" + + +/* Pack arguments to a tuple, implicitly increase all the arguments' refcount. + * NULL arguments will be replaced to Py_None. */ +static PyObject * +pack_arguments_newref(int argc, ...) +{ + assert(!PyErr_Occurred()); + PyObject *tuple = PyTuple_New(argc); + if (!tuple) { + return NULL; + } + + va_list vargs; + va_start(vargs, argc); + for (int i = 0; i < argc; i++) { + PyObject *arg = va_arg(vargs, PyObject *); + if (arg) { + if (_PyObject_IsFreed(arg)) { + PyErr_Format(PyExc_AssertionError, + "argument %d at %p is freed or corrupted!", + i, arg); + va_end(vargs); + Py_DECREF(tuple); + return NULL; + } + } + else { + arg = Py_None; + } + PyTuple_SET_ITEM(tuple, i, Py_NewRef(arg)); + } + va_end(vargs); + return tuple; +} + +/* Pack arguments to a tuple. + * `wrapper` is function which converts primitive type to PyObject. + * `arg_type` is type that arguments should be converted to before wrapped. */ +#define RETURN_PACKED_ARGS(argc, wrapper, arg_type, ...) do { \ + assert(!PyErr_Occurred()); \ + arg_type in[argc] = {__VA_ARGS__}; \ + PyObject *out[argc] = {NULL,}; \ + for (int _i = 0; _i < argc; _i++) { \ + out[_i] = wrapper(in[_i]); \ + assert(out[_i] || PyErr_Occurred()); \ + if (!out[_i]) { \ + for (int _j = 0; _j < _i; _j++) { \ + Py_DECREF(out[_j]); \ + } \ + return NULL; \ + } \ + } \ + PyObject *tuple = PyTuple_New(argc); \ + if (!tuple) { \ + for (int _i = 0; _i < argc; _i++) { \ + Py_DECREF(out[_i]); \ + } \ + return NULL; \ + } \ + for (int _i = 0; _i < argc; _i++) { \ + PyTuple_SET_ITEM(tuple, _i, out[_i]); \ + } \ + return tuple; \ + } while (0) + + +/*[clinic input] +module _testclinic +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=d4981b80d6efdb12]*/ + + +/*[clinic input] +test_empty_function + +[clinic start generated code]*/ + +static PyObject * +test_empty_function_impl(PyObject *module) +/*[clinic end generated code: output=0f8aeb3ddced55cb input=0dd7048651ad4ae4]*/ +{ + Py_RETURN_NONE; +} + + +/*[clinic input] +objects_converter + + a: object + b: object = NULL + / + +[clinic start generated code]*/ + +static PyObject * +objects_converter_impl(PyObject *module, PyObject *a, PyObject *b) +/*[clinic end generated code: output=3f9c9415ec86c695 input=1533b1bd94187de4]*/ +{ + return pack_arguments_newref(2, a, b); +} + + +/*[clinic input] +bytes_object_converter + + a: PyBytesObject + / + +[clinic start generated code]*/ + +static PyObject * +bytes_object_converter_impl(PyObject *module, PyBytesObject *a) +/*[clinic end generated code: output=7732da869d74b784 input=94211751e7996236]*/ +{ + if (!PyBytes_Check(a)) { + PyErr_SetString(PyExc_AssertionError, + "argument a is not a PyBytesObject"); + return NULL; + } + return pack_arguments_newref(1, a); +} + + +/*[clinic input] +byte_array_object_converter + + a: PyByteArrayObject + / + +[clinic start generated code]*/ + +static PyObject * +byte_array_object_converter_impl(PyObject *module, PyByteArrayObject *a) +/*[clinic end generated code: output=51f15c76f302b1f7 input=b04d253db51c6f56]*/ +{ + if (!PyByteArray_Check(a)) { + PyErr_SetString(PyExc_AssertionError, + "argument a is not a PyByteArrayObject"); + return NULL; + } + return pack_arguments_newref(1, a); +} + + +/*[clinic input] +unicode_converter + + a: unicode + / + +[clinic start generated code]*/ + +static PyObject * +unicode_converter_impl(PyObject *module, PyObject *a) +/*[clinic end generated code: output=1b4a4adbb6ac6e34 input=de7b5adbf07435ba]*/ +{ + if (!PyUnicode_Check(a)) { + 
PyErr_SetString(PyExc_AssertionError, + "argument a is not a unicode object"); + return NULL; + } + return pack_arguments_newref(1, a); +} + + +/*[clinic input] +bool_converter + + a: bool = True + b: bool(accept={object}) = True + c: bool(accept={int}) = True + / + +[clinic start generated code]*/ + +static PyObject * +bool_converter_impl(PyObject *module, int a, int b, int c) +/*[clinic end generated code: output=17005b0c29afd590 input=7f6537705b2f32f4]*/ +{ + PyObject *obj_a = a ? Py_True : Py_False; + PyObject *obj_b = b ? Py_True : Py_False; + PyObject *obj_c = c ? Py_True : Py_False; + return pack_arguments_newref(3, obj_a, obj_b, obj_c); +} + + +/*[clinic input] +char_converter + + a: char = b'A' + b: char = b'\a' + c: char = b'\b' + d: char = b'\t' + e: char = b'\n' + f: char = b'\v' + g: char = b'\f' + h: char = b'\r' + i: char = b'"' + j: char = b"'" + k: char = b'?' + l: char = b'\\' + m: char = b'\000' + n: char = b'\377' + / + +[clinic start generated code]*/ + +static PyObject * +char_converter_impl(PyObject *module, char a, char b, char c, char d, char e, + char f, char g, char h, char i, char j, char k, char l, + char m, char n) +/*[clinic end generated code: output=f929dbd2e55a9871 input=b601bc5bc7fe85e3]*/ +{ + RETURN_PACKED_ARGS(14, PyLong_FromUnsignedLong, unsigned char, + a, b, c, d, e, f, g, h, i, j, k, l, m, n); +} + + +/*[clinic input] +unsigned_char_converter + + a: unsigned_char = 12 + b: unsigned_char(bitwise=False) = 34 + c: unsigned_char(bitwise=True) = 56 + / + +[clinic start generated code]*/ + +static PyObject * +unsigned_char_converter_impl(PyObject *module, unsigned char a, + unsigned char b, unsigned char c) +/*[clinic end generated code: output=490af3b39ce0b199 input=e859502fbe0b3185]*/ +{ + RETURN_PACKED_ARGS(3, PyLong_FromUnsignedLong, unsigned char, a, b, c); +} + + +/*[clinic input] +short_converter + + a: short = 12 + / + +[clinic start generated code]*/ + +static PyObject * +short_converter_impl(PyObject *module, short a) +/*[clinic end generated code: output=1ebb7ddb64248988 input=b4e2309a66f650ae]*/ +{ + RETURN_PACKED_ARGS(1, PyLong_FromLong, long, a); +} + + +/*[clinic input] +unsigned_short_converter + + a: unsigned_short = 12 + b: unsigned_short(bitwise=False) = 34 + c: unsigned_short(bitwise=True) = 56 + / + +[clinic start generated code]*/ + +static PyObject * +unsigned_short_converter_impl(PyObject *module, unsigned short a, + unsigned short b, unsigned short c) +/*[clinic end generated code: output=5f92cc72fc8707a7 input=9d15cd11e741d0c6]*/ +{ + RETURN_PACKED_ARGS(3, PyLong_FromUnsignedLong, unsigned long, a, b, c); +} + + +/*[clinic input] +int_converter + + a: int = 12 + b: int(accept={int}) = 34 + c: int(accept={str}) = 45 + / + +[clinic start generated code]*/ + +static PyObject * +int_converter_impl(PyObject *module, int a, int b, int c) +/*[clinic end generated code: output=8e56b59be7d0c306 input=a1dbc6344853db7a]*/ +{ + RETURN_PACKED_ARGS(3, PyLong_FromLong, long, a, b, c); +} + + +/*[clinic input] +unsigned_int_converter + + a: unsigned_int = 12 + b: unsigned_int(bitwise=False) = 34 + c: unsigned_int(bitwise=True) = 56 + / + +[clinic start generated code]*/ + +static PyObject * +unsigned_int_converter_impl(PyObject *module, unsigned int a, unsigned int b, + unsigned int c) +/*[clinic end generated code: output=399a57a05c494cc7 input=8427ed9a3f96272d]*/ +{ + RETURN_PACKED_ARGS(3, PyLong_FromUnsignedLong, unsigned long, a, b, c); +} + + +/*[clinic input] +long_converter + + a: long = 12 + / + +[clinic start generated code]*/ + 
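/* Editorial aside (hedged sketch, not part of this patch): for a clinic
 * declaration like the one above, Argument Clinic emits a parsing wrapper
 * into clinic/_testclinic.c.h that converts the arguments and then calls
 * the hand-written *_impl function, roughly along these lines:
 *
 *     static PyObject *
 *     long_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
 *     {
 *         long a = 12;                      // default from the clinic block
 *         if (nargs > 0) {
 *             a = PyLong_AsLong(args[0]);
 *             if (a == -1 && PyErr_Occurred()) {
 *                 return NULL;
 *             }
 *         }
 *         return long_converter_impl(module, a);
 *     }
 *
 * The exact generated code differs (argument-count checks, error messages,
 * clinic versioning comments); only the impl functions in this file are
 * written by hand. */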
+static PyObject * +long_converter_impl(PyObject *module, long a) +/*[clinic end generated code: output=9663d936a652707a input=84ad0ef28f24bd85]*/ +{ + RETURN_PACKED_ARGS(1, PyLong_FromLong, long, a); +} + + +/*[clinic input] +unsigned_long_converter + + a: unsigned_long = 12 + b: unsigned_long(bitwise=False) = 34 + c: unsigned_long(bitwise=True) = 56 + / + +[clinic start generated code]*/ + +static PyObject * +unsigned_long_converter_impl(PyObject *module, unsigned long a, + unsigned long b, unsigned long c) +/*[clinic end generated code: output=120b82ea9ebd93a8 input=440dd6f1817f5d91]*/ +{ + RETURN_PACKED_ARGS(3, PyLong_FromUnsignedLong, unsigned long, a, b, c); +} + + +/*[clinic input] +long_long_converter + + a: long_long = 12 + / + +[clinic start generated code]*/ + +static PyObject * +long_long_converter_impl(PyObject *module, long long a) +/*[clinic end generated code: output=5fb5f2220770c3e1 input=730fcb3eecf4d993]*/ +{ + RETURN_PACKED_ARGS(1, PyLong_FromLongLong, long long, a); +} + + +/*[clinic input] +unsigned_long_long_converter + + a: unsigned_long_long = 12 + b: unsigned_long_long(bitwise=False) = 34 + c: unsigned_long_long(bitwise=True) = 56 + / + +[clinic start generated code]*/ + +static PyObject * +unsigned_long_long_converter_impl(PyObject *module, unsigned long long a, + unsigned long long b, unsigned long long c) +/*[clinic end generated code: output=65b7273e63501762 input=300737b0bdb230e9]*/ +{ + RETURN_PACKED_ARGS(3, PyLong_FromUnsignedLongLong, unsigned long long, + a, b, c); +} + + +/*[clinic input] +py_ssize_t_converter + + a: Py_ssize_t = 12 + b: Py_ssize_t(accept={int}) = 34 + c: Py_ssize_t(accept={int, NoneType}) = 56 + / + +[clinic start generated code]*/ + +static PyObject * +py_ssize_t_converter_impl(PyObject *module, Py_ssize_t a, Py_ssize_t b, + Py_ssize_t c) +/*[clinic end generated code: output=ce252143e0ed0372 input=76d0f342e9317a1f]*/ +{ + RETURN_PACKED_ARGS(3, PyLong_FromSsize_t, Py_ssize_t, a, b, c); +} + + +/*[clinic input] +slice_index_converter + + a: slice_index = 12 + b: slice_index(accept={int}) = 34 + c: slice_index(accept={int, NoneType}) = 56 + / + +[clinic start generated code]*/ + +static PyObject * +slice_index_converter_impl(PyObject *module, Py_ssize_t a, Py_ssize_t b, + Py_ssize_t c) +/*[clinic end generated code: output=923c6cac77666a6b input=64f99f3f83265e47]*/ +{ + RETURN_PACKED_ARGS(3, PyLong_FromSsize_t, Py_ssize_t, a, b, c); +} + + +/*[clinic input] +size_t_converter + + a: size_t = 12 + / + +[clinic start generated code]*/ + +static PyObject * +size_t_converter_impl(PyObject *module, size_t a) +/*[clinic end generated code: output=412b5b7334ab444d input=83ae7d9171fbf208]*/ +{ + RETURN_PACKED_ARGS(1, PyLong_FromSize_t, size_t, a); +} + + +/*[clinic input] +float_converter + + a: float = 12.5 + / + +[clinic start generated code]*/ + +static PyObject * +float_converter_impl(PyObject *module, float a) +/*[clinic end generated code: output=1c98f64f2cf1d55c input=a625b59ad68047d8]*/ +{ + RETURN_PACKED_ARGS(1, PyFloat_FromDouble, double, a); +} + + +/*[clinic input] +double_converter + + a: double = 12.5 + / + +[clinic start generated code]*/ + +static PyObject * +double_converter_impl(PyObject *module, double a) +/*[clinic end generated code: output=a4e8532d284d035d input=098df188f24e7c62]*/ +{ + RETURN_PACKED_ARGS(1, PyFloat_FromDouble, double, a); +} + + +/*[clinic input] +py_complex_converter + + a: Py_complex + / + +[clinic start generated code]*/ + +static PyObject * +py_complex_converter_impl(PyObject *module, Py_complex a) 
+/*[clinic end generated code: output=9e6ca2eb53b14846 input=e9148a8ca1dbf195]*/ +{ + RETURN_PACKED_ARGS(1, PyComplex_FromCComplex, Py_complex, a); +} + + +/*[clinic input] +str_converter + + a: str = "a" + b: str(accept={robuffer}) = "b" + c: str(accept={robuffer, str}, zeroes=True) = "c" + / + +[clinic start generated code]*/ + +static PyObject * +str_converter_impl(PyObject *module, const char *a, const char *b, + const char *c, Py_ssize_t c_length) +/*[clinic end generated code: output=475bea40548c8cd6 input=bff2656c92ee25de]*/ +{ + assert(!PyErr_Occurred()); + PyObject *out[3] = {NULL,}; + int i = 0; + PyObject *arg; + + arg = PyUnicode_FromString(a); + assert(arg || PyErr_Occurred()); + if (!arg) { + goto error; + } + out[i++] = arg; + + arg = PyUnicode_FromString(b); + assert(arg || PyErr_Occurred()); + if (!arg) { + goto error; + } + out[i++] = arg; + + arg = PyUnicode_FromStringAndSize(c, c_length); + assert(arg || PyErr_Occurred()); + if (!arg) { + goto error; + } + out[i++] = arg; + + PyObject *tuple = PyTuple_New(3); + if (!tuple) { + goto error; + } + for (int j = 0; j < 3; j++) { + PyTuple_SET_ITEM(tuple, j, out[j]); + } + return tuple; + +error: + for (int j = 0; j < i; j++) { + Py_DECREF(out[j]); + } + return NULL; +} + + +/*[clinic input] +str_converter_encoding + + a: str(encoding="idna") + b: str(encoding="idna", accept={bytes, bytearray, str}) + c: str(encoding="idna", accept={bytes, bytearray, str}, zeroes=True) + / + +[clinic start generated code]*/ + +static PyObject * +str_converter_encoding_impl(PyObject *module, char *a, char *b, char *c, + Py_ssize_t c_length) +/*[clinic end generated code: output=af68766049248a1c input=0c5cf5159d0e870d]*/ +{ + assert(!PyErr_Occurred()); + PyObject *out[3] = {NULL,}; + int i = 0; + PyObject *arg; + + arg = PyUnicode_FromString(a); + assert(arg || PyErr_Occurred()); + if (!arg) { + goto error; + } + out[i++] = arg; + + arg = PyUnicode_FromString(b); + assert(arg || PyErr_Occurred()); + if (!arg) { + goto error; + } + out[i++] = arg; + + arg = PyUnicode_FromStringAndSize(c, c_length); + assert(arg || PyErr_Occurred()); + if (!arg) { + goto error; + } + out[i++] = arg; + + PyObject *tuple = PyTuple_New(3); + if (!tuple) { + goto error; + } + for (int j = 0; j < 3; j++) { + PyTuple_SET_ITEM(tuple, j, out[j]); + } + return tuple; + +error: + for (int j = 0; j < i; j++) { + Py_DECREF(out[j]); + } + return NULL; +} + + +static PyObject * +bytes_from_buffer(Py_buffer *buf) +{ + PyObject *bytes_obj = PyBytes_FromStringAndSize(NULL, buf->len); + if (!bytes_obj) { + return NULL; + } + void *bytes_obj_buf = ((PyBytesObject *)bytes_obj)->ob_sval; + if (PyBuffer_ToContiguous(bytes_obj_buf, buf, buf->len, 'C') < 0) { + Py_DECREF(bytes_obj); + return NULL; + } + return bytes_obj; +} + +/*[clinic input] +py_buffer_converter + + a: Py_buffer(accept={str, buffer, NoneType}) + b: Py_buffer(accept={rwbuffer}) + / + +[clinic start generated code]*/ + +static PyObject * +py_buffer_converter_impl(PyObject *module, Py_buffer *a, Py_buffer *b) +/*[clinic end generated code: output=52fb13311e3d6d03 input=775de727de5c7421]*/ +{ + RETURN_PACKED_ARGS(2, bytes_from_buffer, Py_buffer *, a, b); +} + + +/*[clinic input] +keywords + + a: object + b: object + +[clinic start generated code]*/ + +static PyObject * +keywords_impl(PyObject *module, PyObject *a, PyObject *b) +/*[clinic end generated code: output=850aaed53e26729e input=f44b89e718c1a93b]*/ +{ + return pack_arguments_newref(2, a, b); +} + + +/*[clinic input] +keywords_kwonly + + a: object + * + b: object + 
+[clinic start generated code]*/ + +static PyObject * +keywords_kwonly_impl(PyObject *module, PyObject *a, PyObject *b) +/*[clinic end generated code: output=a45c48241da584dc input=1f08e39c3312b015]*/ +{ + return pack_arguments_newref(2, a, b); +} + + +/*[clinic input] +keywords_opt + + a: object + b: object = None + c: object = None + +[clinic start generated code]*/ + +static PyObject * +keywords_opt_impl(PyObject *module, PyObject *a, PyObject *b, PyObject *c) +/*[clinic end generated code: output=25e4b67d91c76a66 input=b0ba0e4f04904556]*/ +{ + return pack_arguments_newref(3, a, b, c); +} + + +/*[clinic input] +keywords_opt_kwonly + + a: object + b: object = None + * + c: object = None + d: object = None + +[clinic start generated code]*/ + +static PyObject * +keywords_opt_kwonly_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d) +/*[clinic end generated code: output=6aa5b655a6e9aeb0 input=f79da689d6c51076]*/ +{ + return pack_arguments_newref(4, a, b, c, d); +} + + +/*[clinic input] +keywords_kwonly_opt + + a: object + * + b: object = None + c: object = None + +[clinic start generated code]*/ + +static PyObject * +keywords_kwonly_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c) +/*[clinic end generated code: output=707f78eb0f55c2b1 input=e0fa1a0e46dca791]*/ +{ + return pack_arguments_newref(3, a, b, c); +} + + +/*[clinic input] +posonly_keywords + + a: object + / + b: object + +[clinic start generated code]*/ + +static PyObject * +posonly_keywords_impl(PyObject *module, PyObject *a, PyObject *b) +/*[clinic end generated code: output=6ac88f4a5f0bfc8d input=fde0a2f79fe82b06]*/ +{ + return pack_arguments_newref(2, a, b); +} + + +/*[clinic input] +posonly_kwonly + + a: object + / + * + b: object + +[clinic start generated code]*/ + +static PyObject * +posonly_kwonly_impl(PyObject *module, PyObject *a, PyObject *b) +/*[clinic end generated code: output=483e6790d3482185 input=78b3712768da9a19]*/ +{ + return pack_arguments_newref(2, a, b); +} + + +/*[clinic input] +posonly_keywords_kwonly + + a: object + / + b: object + * + c: object + +[clinic start generated code]*/ + +static PyObject * +posonly_keywords_kwonly_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c) +/*[clinic end generated code: output=2fae573e8cc3fad8 input=a1ad5d2295eb803c]*/ +{ + return pack_arguments_newref(3, a, b, c); +} + + +/*[clinic input] +posonly_keywords_opt + + a: object + / + b: object + c: object = None + d: object = None + +[clinic start generated code]*/ + +static PyObject * +posonly_keywords_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d) +/*[clinic end generated code: output=f5eb66241bcf68fb input=51c10de2a120e279]*/ +{ + return pack_arguments_newref(4, a, b, c, d); +} + + +/*[clinic input] +posonly_opt_keywords_opt + + a: object + b: object = None + / + c: object = None + d: object = None + +[clinic start generated code]*/ + +static PyObject * +posonly_opt_keywords_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d) +/*[clinic end generated code: output=d54a30e549296ffd input=f408a1de7dfaf31f]*/ +{ + return pack_arguments_newref(4, a, b, c, d); +} + + +/*[clinic input] +posonly_kwonly_opt + + a: object + / + * + b: object + c: object = None + d: object = None + +[clinic start generated code]*/ + +static PyObject * +posonly_kwonly_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d) +/*[clinic end generated code: output=a20503fe36b4fd62 input=3494253975272f52]*/ +{ 
+ return pack_arguments_newref(4, a, b, c, d); +} + + +/*[clinic input] +posonly_opt_kwonly_opt + + a: object + b: object = None + / + * + c: object = None + d: object = None + +[clinic start generated code]*/ + +static PyObject * +posonly_opt_kwonly_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d) +/*[clinic end generated code: output=64f3204a3a0413b6 input=d17516581e478412]*/ +{ + return pack_arguments_newref(4, a, b, c, d); +} + + +/*[clinic input] +posonly_keywords_kwonly_opt + + a: object + / + b: object + * + c: object + d: object = None + e: object = None + +[clinic start generated code]*/ + +static PyObject * +posonly_keywords_kwonly_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d, PyObject *e) +/*[clinic end generated code: output=dbd7e7ddd6257fa0 input=33529f29e97e5adb]*/ +{ + return pack_arguments_newref(5, a, b, c, d, e); +} + + +/*[clinic input] +posonly_keywords_opt_kwonly_opt + + a: object + / + b: object + c: object = None + * + d: object = None + e: object = None + +[clinic start generated code]*/ + +static PyObject * +posonly_keywords_opt_kwonly_opt_impl(PyObject *module, PyObject *a, + PyObject *b, PyObject *c, PyObject *d, + PyObject *e) +/*[clinic end generated code: output=775d12ae44653045 input=4d4cc62f11441301]*/ +{ + return pack_arguments_newref(5, a, b, c, d, e); +} + + +/*[clinic input] +posonly_opt_keywords_opt_kwonly_opt + + a: object + b: object = None + / + c: object = None + * + d: object = None + +[clinic start generated code]*/ + +static PyObject * +posonly_opt_keywords_opt_kwonly_opt_impl(PyObject *module, PyObject *a, + PyObject *b, PyObject *c, + PyObject *d) +/*[clinic end generated code: output=40c6dc422591eade input=3964960a68622431]*/ +{ + return pack_arguments_newref(4, a, b, c, d); +} + + +/*[clinic input] +keyword_only_parameter + + * + a: object + +[clinic start generated code]*/ + +static PyObject * +keyword_only_parameter_impl(PyObject *module, PyObject *a) +/*[clinic end generated code: output=c454b6ce98232787 input=8d2868b8d0b27bdb]*/ +{ + return pack_arguments_newref(1, a); +} + + +/*[clinic input] +posonly_vararg + + a: object + / + b: object + *args: object + +[clinic start generated code]*/ + +static PyObject * +posonly_vararg_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *args) +/*[clinic end generated code: output=ee6713acda6b954e input=783427fe7ec2b67a]*/ +{ + return pack_arguments_newref(3, a, b, args); +} + + +/*[clinic input] +vararg_and_posonly + + a: object + *args: object + / + +[clinic start generated code]*/ + +static PyObject * +vararg_and_posonly_impl(PyObject *module, PyObject *a, PyObject *args) +/*[clinic end generated code: output=42792f799465a14d input=defe017b19ba52e8]*/ +{ + return pack_arguments_newref(2, a, args); +} + + +/*[clinic input] +vararg + + a: object + *args: object + +[clinic start generated code]*/ + +static PyObject * +vararg_impl(PyObject *module, PyObject *a, PyObject *args) +/*[clinic end generated code: output=91ab7a0efc52dd5e input=02c0f772d05f591e]*/ +{ + return pack_arguments_newref(2, a, args); +} + + +/*[clinic input] +vararg_with_default + + a: object + *args: object + b: bool = False + +[clinic start generated code]*/ + +static PyObject * +vararg_with_default_impl(PyObject *module, PyObject *a, PyObject *args, + int b) +/*[clinic end generated code: output=182c01035958ce92 input=68cafa6a79f89e36]*/ +{ + PyObject *obj_b = b ? 
Py_True : Py_False; + return pack_arguments_newref(3, a, args, obj_b); +} + + +/*[clinic input] +vararg_with_only_defaults + + *args: object + b: object = None + +[clinic start generated code]*/ + +static PyObject * +vararg_with_only_defaults_impl(PyObject *module, PyObject *args, PyObject *b) +/*[clinic end generated code: output=c06b1826d91f2f7b input=678c069bc67550e1]*/ +{ + return pack_arguments_newref(2, args, b); +} + + + +/*[clinic input] +gh_32092_oob + + pos1: object + pos2: object + *varargs: object + kw1: object = None + kw2: object = None + +Proof-of-concept of GH-32092 OOB bug. + +[clinic start generated code]*/ + +static PyObject * +gh_32092_oob_impl(PyObject *module, PyObject *pos1, PyObject *pos2, + PyObject *varargs, PyObject *kw1, PyObject *kw2) +/*[clinic end generated code: output=ee259c130054653f input=46d15c881608f8ff]*/ +{ + Py_RETURN_NONE; +} + + +/*[clinic input] +gh_32092_kw_pass + + pos: object + *args: object + kw: object = None + +Proof-of-concept of GH-32092 keyword args passing bug. + +[clinic start generated code]*/ + +static PyObject * +gh_32092_kw_pass_impl(PyObject *module, PyObject *pos, PyObject *args, + PyObject *kw) +/*[clinic end generated code: output=4a2bbe4f7c8604e9 input=5c0bd5b9079a0cce]*/ +{ + Py_RETURN_NONE; +} + + +/*[clinic input] +gh_99233_refcount + + *args: object + / + +Proof-of-concept of GH-99233 refcount error bug. + +[clinic start generated code]*/ + +static PyObject * +gh_99233_refcount_impl(PyObject *module, PyObject *args) +/*[clinic end generated code: output=585855abfbca9a7f input=85f5fb47ac91a626]*/ +{ + Py_RETURN_NONE; +} + + +/*[clinic input] +gh_99240_double_free + + a: str(encoding="idna") + b: str(encoding="idna") + / + +Proof-of-concept of GH-99240 double-free bug. + +[clinic start generated code]*/ + +static PyObject * +gh_99240_double_free_impl(PyObject *module, char *a, char *b) +/*[clinic end generated code: output=586dc714992fe2ed input=23db44aa91870fc7]*/ +{ + Py_RETURN_NONE; +} + + +static PyMethodDef tester_methods[] = { + TEST_EMPTY_FUNCTION_METHODDEF + OBJECTS_CONVERTER_METHODDEF + BYTES_OBJECT_CONVERTER_METHODDEF + BYTE_ARRAY_OBJECT_CONVERTER_METHODDEF + UNICODE_CONVERTER_METHODDEF + BOOL_CONVERTER_METHODDEF + CHAR_CONVERTER_METHODDEF + UNSIGNED_CHAR_CONVERTER_METHODDEF + SHORT_CONVERTER_METHODDEF + UNSIGNED_SHORT_CONVERTER_METHODDEF + INT_CONVERTER_METHODDEF + UNSIGNED_INT_CONVERTER_METHODDEF + LONG_CONVERTER_METHODDEF + UNSIGNED_LONG_CONVERTER_METHODDEF + LONG_LONG_CONVERTER_METHODDEF + UNSIGNED_LONG_LONG_CONVERTER_METHODDEF + PY_SSIZE_T_CONVERTER_METHODDEF + SLICE_INDEX_CONVERTER_METHODDEF + SIZE_T_CONVERTER_METHODDEF + FLOAT_CONVERTER_METHODDEF + DOUBLE_CONVERTER_METHODDEF + PY_COMPLEX_CONVERTER_METHODDEF + STR_CONVERTER_METHODDEF + STR_CONVERTER_ENCODING_METHODDEF + PY_BUFFER_CONVERTER_METHODDEF + KEYWORDS_METHODDEF + KEYWORDS_KWONLY_METHODDEF + KEYWORDS_OPT_METHODDEF + KEYWORDS_OPT_KWONLY_METHODDEF + KEYWORDS_KWONLY_OPT_METHODDEF + POSONLY_KEYWORDS_METHODDEF + POSONLY_KWONLY_METHODDEF + POSONLY_KEYWORDS_KWONLY_METHODDEF + POSONLY_KEYWORDS_OPT_METHODDEF + POSONLY_OPT_KEYWORDS_OPT_METHODDEF + POSONLY_KWONLY_OPT_METHODDEF + POSONLY_OPT_KWONLY_OPT_METHODDEF + POSONLY_KEYWORDS_KWONLY_OPT_METHODDEF + POSONLY_KEYWORDS_OPT_KWONLY_OPT_METHODDEF + POSONLY_OPT_KEYWORDS_OPT_KWONLY_OPT_METHODDEF + KEYWORD_ONLY_PARAMETER_METHODDEF + POSONLY_VARARG_METHODDEF + VARARG_AND_POSONLY_METHODDEF + VARARG_METHODDEF + VARARG_WITH_DEFAULT_METHODDEF + VARARG_WITH_ONLY_DEFAULTS_METHODDEF + GH_32092_OOB_METHODDEF + 
GH_32092_KW_PASS_METHODDEF + GH_99233_REFCOUNT_METHODDEF + GH_99240_DOUBLE_FREE_METHODDEF + {NULL, NULL} +}; + +static struct PyModuleDef _testclinic_module = { + PyModuleDef_HEAD_INIT, + .m_name = "_testclinic", + .m_size = 0, + .m_methods = tester_methods, +}; + +PyMODINIT_FUNC +PyInit__testclinic(void) +{ + return PyModule_Create(&_testclinic_module); +} + +#undef RETURN_PACKED_ARGS diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index d74f92808d3aba..b14b8ac3c74054 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -14,7 +14,7 @@ #include "Python.h" #include "pycore_atomic_funcs.h" // _Py_atomic_int_get() #include "pycore_bitutils.h" // _Py_bswap32() -#include "pycore_compile.h" // _PyCompile_OptimizeCfg() +#include "pycore_compile.h" // _PyCompile_CodeGen, _PyCompile_OptimizeCfg #include "pycore_fileutils.h" // _Py_normpath #include "pycore_frame.h" // _PyInterpreterFrame #include "pycore_gc.h" // PyGC_Head @@ -523,13 +523,31 @@ set_eval_frame_record(PyObject *self, PyObject *list) PyErr_SetString(PyExc_TypeError, "argument must be a list"); return NULL; } - Py_CLEAR(record_list); - Py_INCREF(list); - record_list = list; + Py_XSETREF(record_list, Py_NewRef(list)); _PyInterpreterState_SetEvalFrameFunc(PyInterpreterState_Get(), record_eval); Py_RETURN_NONE; } +/*[clinic input] + +_testinternalcapi.compiler_codegen -> object + + ast: object + filename: object + optimize: int + +Apply compiler code generation to an AST. +[clinic start generated code]*/ + +static PyObject * +_testinternalcapi_compiler_codegen_impl(PyObject *module, PyObject *ast, + PyObject *filename, int optimize) +/*[clinic end generated code: output=fbbbbfb34700c804 input=e9fbe6562f7f75e4]*/ +{ + PyCompilerFlags *flags = NULL; + return _PyCompile_CodeGen(ast, filename, flags, optimize); +} + /*[clinic input] @@ -613,6 +631,7 @@ static PyMethodDef TestMethods[] = { {"DecodeLocaleEx", decode_locale_ex, METH_VARARGS}, {"set_eval_frame_default", set_eval_frame_default, METH_NOARGS, NULL}, {"set_eval_frame_record", set_eval_frame_record, METH_O, NULL}, + _TESTINTERNALCAPI_COMPILER_CODEGEN_METHODDEF _TESTINTERNALCAPI_OPTIMIZE_CFG_METHODDEF {"get_interp_settings", get_interp_settings, METH_VARARGS, NULL}, {NULL, NULL} /* sentinel */ diff --git a/Modules/_testmultiphase.c b/Modules/_testmultiphase.c index b8993a29ae95df..e34854f7025798 100644 --- a/Modules/_testmultiphase.c +++ b/Modules/_testmultiphase.c @@ -57,8 +57,7 @@ Example_demo(ExampleObject *self, PyObject *args) if (!PyArg_ParseTuple(args, "|O:demo", &o)) return NULL; if (o != NULL && PyUnicode_Check(o)) { - Py_INCREF(o); - return o; + return Py_NewRef(o); } Py_RETURN_NONE; } @@ -77,8 +76,7 @@ Example_getattro(ExampleObject *self, PyObject *name) if (self->x_attr != NULL) { PyObject *v = PyDict_GetItemWithError(self->x_attr, name); if (v != NULL) { - Py_INCREF(v); - return v; + return Py_NewRef(v); } else if (PyErr_Occurred()) { return NULL; @@ -151,8 +149,7 @@ _testmultiphase_StateAccessType_get_defining_module_impl(StateAccessTypeObject * return NULL; } assert(PyType_GetModuleByDef(Py_TYPE(self), &def_meth_state_access) == retval); - Py_INCREF(retval); - return retval; + return Py_NewRef(retval); } /*[clinic input] diff --git a/Modules/_testsinglephase.c b/Modules/_testsinglephase.c new file mode 100644 index 00000000000000..3bfe159e54fe49 --- /dev/null +++ b/Modules/_testsinglephase.c @@ -0,0 +1,78 @@ + +/* Testing module for single-phase initialization of extension modules + */ +#ifndef Py_BUILD_CORE_BUILTIN +# 
define Py_BUILD_CORE_MODULE 1 +#endif + +#include "Python.h" +#include "pycore_namespace.h" // _PyNamespace_New() + + +/* Function of two integers returning integer */ + +PyDoc_STRVAR(testexport_foo_doc, +"foo(i,j)\n\ +\n\ +Return the sum of i and j."); + +static PyObject * +testexport_foo(PyObject *self, PyObject *args) +{ + long i, j; + long res; + if (!PyArg_ParseTuple(args, "ll:foo", &i, &j)) + return NULL; + res = i + j; + return PyLong_FromLong(res); +} + + +static PyMethodDef TestMethods[] = { + {"foo", testexport_foo, METH_VARARGS, + testexport_foo_doc}, + {NULL, NULL} /* sentinel */ +}; + + +static struct PyModuleDef _testsinglephase = { + PyModuleDef_HEAD_INIT, + .m_name = "_testsinglephase", + .m_doc = PyDoc_STR("Test module _testsinglephase (main)"), + .m_size = -1, // no module state + .m_methods = TestMethods, +}; + + +PyMODINIT_FUNC +PyInit__testsinglephase(void) +{ + PyObject *module = PyModule_Create(&_testsinglephase); + if (module == NULL) { + return NULL; + } + + /* Add an exception type */ + PyObject *temp = PyErr_NewException("_testsinglephase.error", NULL, NULL); + if (temp == NULL) { + goto error; + } + if (PyModule_AddObject(module, "error", temp) != 0) { + Py_DECREF(temp); + goto error; + } + + if (PyModule_AddIntConstant(module, "int_const", 1969) != 0) { + goto error; + } + + if (PyModule_AddStringConstant(module, "str_const", "something different") != 0) { + goto error; + } + + return module; + +error: + Py_DECREF(module); + return NULL; +} diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c index 5968d4e2e0ee15..d323ca83dcffa7 100644 --- a/Modules/_threadmodule.c +++ b/Modules/_threadmodule.c @@ -135,7 +135,7 @@ lock_acquire_parse_args(PyObject *args, PyObject *kwds, *timeout = unset_timeout ; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "|iO:acquire", kwlist, + if (!PyArg_ParseTupleAndKeywords(args, kwds, "|pO:acquire", kwlist, &blocking, &timeout_obj)) return -1; @@ -1145,6 +1145,11 @@ thread_PyThread_start_new_thread(PyObject *self, PyObject *fargs) return NULL; } + if (PySys_Audit("_thread.start_new_thread", "OOO", + func, args, kwargs ? kwargs : Py_None) < 0) { + return NULL; + } + PyInterpreterState *interp = _PyInterpreterState_GET(); if (!_PyInterpreterState_HasFeature(interp, Py_RTFLAGS_THREADS)) { PyErr_SetString(PyExc_RuntimeError, @@ -1160,7 +1165,10 @@ thread_PyThread_start_new_thread(PyObject *self, PyObject *fargs) boot->tstate = _PyThreadState_Prealloc(boot->interp); if (boot->tstate == NULL) { PyMem_Free(boot); - return PyErr_NoMemory(); + if (!PyErr_Occurred()) { + return PyErr_NoMemory(); + } + return NULL; } boot->runtime = runtime; boot->func = Py_NewRef(func); diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c index 4807ad59f6da2a..d4a129058702a2 100644 --- a/Modules/_tkinter.c +++ b/Modules/_tkinter.c @@ -784,16 +784,14 @@ PyTclObject_string(PyTclObject *self, void *ignored) if (!self->string) return NULL; } - Py_INCREF(self->string); - return self->string; + return Py_NewRef(self->string); } static PyObject * PyTclObject_str(PyTclObject *self) { if (self->string) { - Py_INCREF(self->string); - return self->string; + return Py_NewRef(self->string); } /* XXX Could cache result if it is non-ASCII. 
*/ return unicodeFromTclObj(self->value); @@ -1107,8 +1105,7 @@ fromBignumObj(TkappObject *tkapp, Tcl_Obj *value) PyMem_Free(bytes); if (res != NULL && bigValue.sign == MP_NEG) { PyObject *res2 = PyNumber_Negative(res); - Py_DECREF(res); - res = res2; + Py_SETREF(res, res2); } mp_clear(&bigValue); return res; @@ -1736,8 +1733,7 @@ SetVar(TkappObject *self, PyObject *args, int flags) if (!ok) Tkinter_Error(self); else { - res = Py_None; - Py_INCREF(res); + res = Py_NewRef(Py_None); } LEAVE_OVERLAP_TCL break; @@ -1755,8 +1751,7 @@ SetVar(TkappObject *self, PyObject *args, int flags) if (!ok) Tkinter_Error(self); else { - res = Py_None; - Py_INCREF(res); + res = Py_NewRef(Py_None); } LEAVE_OVERLAP_TCL break; @@ -1842,8 +1837,7 @@ UnsetVar(TkappObject *self, PyObject *args, int flags) if (code == TCL_ERROR) res = Tkinter_Error(self); else { - Py_INCREF(Py_None); - res = Py_None; + res = Py_NewRef(Py_None); } LEAVE_OVERLAP_TCL return res; @@ -1883,8 +1877,7 @@ _tkinter_tkapp_getint(TkappObject *self, PyObject *arg) PyObject *result; if (PyLong_Check(arg)) { - Py_INCREF(arg); - return arg; + return Py_NewRef(arg); } if (PyTclObject_Check(arg)) { @@ -1928,8 +1921,7 @@ _tkinter_tkapp_getdouble(TkappObject *self, PyObject *arg) double v; if (PyFloat_Check(arg)) { - Py_INCREF(arg); - return arg; + return Py_NewRef(arg); } if (PyNumber_Check(arg)) { @@ -2145,8 +2137,7 @@ _tkinter_tkapp_splitlist(TkappObject *self, PyObject *arg) return v; } if (PyTuple_Check(arg)) { - Py_INCREF(arg); - return arg; + return Py_NewRef(arg); } if (PyList_Check(arg)) { return PySequence_Tuple(arg); @@ -2172,8 +2163,7 @@ _tkinter_tkapp_splitlist(TkappObject *self, PyObject *arg) for (i = 0; i < argc; i++) { PyObject *s = unicodeFromTclString(argv[i]); if (!s) { - Py_DECREF(v); - v = NULL; + Py_SETREF(v, NULL); goto finally; } PyTuple_SET_ITEM(v, i, s); @@ -2322,10 +2312,8 @@ _tkinter_tkapp_createcommand_impl(TkappObject *self, const char *name, data = PyMem_NEW(PythonCmd_ClientData, 1); if (!data) return PyErr_NoMemory(); - Py_INCREF(self); - Py_INCREF(func); - data->self = (PyObject *) self; - data->func = func; + data->self = Py_NewRef(self); + data->func = Py_NewRef(func); if (self->threaded && self->thread_id != Tcl_GetCurrentThread()) { Tcl_Condition cond = NULL; CommandEvent *ev = (CommandEvent*)attemptckalloc(sizeof(CommandEvent)); @@ -2430,10 +2418,8 @@ NewFHCD(PyObject *func, PyObject *file, int id) FileHandler_ClientData *p; p = PyMem_NEW(FileHandler_ClientData, 1); if (p != NULL) { - Py_XINCREF(func); - Py_XINCREF(file); - p->func = func; - p->file = file; + p->func = Py_XNewRef(func); + p->file = Py_XNewRef(file); p->id = id; p->next = HeadFHCD; HeadFHCD = p; @@ -2591,13 +2577,11 @@ Tktt_New(PyObject *func) if (v == NULL) return NULL; - Py_INCREF(func); v->token = NULL; - v->func = func; + v->func = Py_NewRef(func); /* Extra reference, deleted when called or when handler is deleted */ - Py_INCREF(v); - return v; + return (TkttObject*)Py_NewRef(v); } static void @@ -2848,7 +2832,7 @@ Tkapp_WantObjects(PyObject *self, PyObject *args) { int wantobjects = -1; - if (!PyArg_ParseTuple(args, "|i:wantobjects", &wantobjects)) + if (!PyArg_ParseTuple(args, "|p:wantobjects", &wantobjects)) return NULL; if (wantobjects == -1) return PyBool_FromLong(((TkappObject*)self)->wantobjects); @@ -2940,9 +2924,8 @@ _flatten1(FlattenContext* context, PyObject* item, int depth) if (context->size + 1 > context->maxsize && !_bump(context, 1)) return 0; - Py_INCREF(o); PyTuple_SET_ITEM(context->tuple, - context->size++, o); + 
context->size++, Py_NewRef(o)); } } } else { @@ -2995,11 +2978,11 @@ _tkinter.create screenName: str(accept={str, NoneType}) = None baseName: str = "" className: str = "Tk" - interactive: bool(accept={int}) = False - wantobjects: bool(accept={int}) = False - wantTk: bool(accept={int}) = True + interactive: bool = False + wantobjects: bool = False + wantTk: bool = True if false, then Tk_Init() doesn't get called - sync: bool(accept={int}) = False + sync: bool = False if true, then pass -sync to wish use: str(accept={str, NoneType}) = None if not None, then pass -use to wish @@ -3012,7 +2995,7 @@ _tkinter_create_impl(PyObject *module, const char *screenName, const char *baseName, const char *className, int interactive, int wantobjects, int wantTk, int sync, const char *use) -/*[clinic end generated code: output=e3315607648e6bb4 input=da9b17ee7358d862]*/ +/*[clinic end generated code: output=e3315607648e6bb4 input=09afef9adea70a19]*/ { /* XXX baseName is not used anymore; * try getting rid of it. */ @@ -3266,8 +3249,7 @@ PyInit__tkinter(void) Py_DECREF(m); return NULL; } - Py_INCREF(o); - if (PyModule_AddObject(m, "TclError", o)) { + if (PyModule_AddObject(m, "TclError", Py_NewRef(o))) { Py_DECREF(o); Py_DECREF(m); return NULL; diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c index 44a1f7b673c0eb..ac16626f2101ba 100644 --- a/Modules/_tracemalloc.c +++ b/Modules/_tracemalloc.c @@ -16,10 +16,9 @@ module _tracemalloc [clinic start generated code]*/ /*[clinic end generated code: output=da39a3ee5e6b4b0d input=708a98302fc46e5f]*/ -_Py_DECLARE_STR(anon_unknown, "<unknown>"); +#define tracemalloc_config _PyRuntime.tracemalloc.config -/* Trace memory blocks allocated by PyMem_RawMalloc() */ -#define TRACE_RAW_MALLOC +_Py_DECLARE_STR(anon_unknown, "<unknown>"); /* Forward declaration */ static void tracemalloc_stop(void); @@ -33,19 +32,14 @@ static void raw_free(void *ptr); #define TO_PTR(key) ((const void *)(uintptr_t)(key)) #define FROM_PTR(key) ((uintptr_t)(key)) -/* Protected by the GIL */ -static struct { - PyMemAllocatorEx mem; - PyMemAllocatorEx raw; - PyMemAllocatorEx obj; -} allocators; +#define allocators _PyRuntime.tracemalloc.allocators #if defined(TRACE_RAW_MALLOC) /* This lock is needed because tracemalloc_free() is called without the GIL held from PyMem_RawFree(). It cannot acquire the lock because it would introduce a deadlock in _PyThreadState_DeleteCurrent(). */ -static PyThread_type_lock tables_lock; +# define tables_lock _PyRuntime.tracemalloc.tables_lock # define TABLES_LOCK() PyThread_acquire_lock(tables_lock, 1) # define TABLES_UNLOCK() PyThread_release_lock(tables_lock) #else @@ -57,33 +51,8 @@ static PyThread_type_lock tables_lock; #define DEFAULT_DOMAIN 0 -/* Pack the frame_t structure to reduce the memory footprint on 64-bit - architectures: 12 bytes instead of 16. 
*/ -typedef struct -#ifdef __GNUC__ -__attribute__((packed)) -#elif defined(_MSC_VER) -#pragma pack(push, 4) -#endif -{ - /* filename cannot be NULL: "<unknown>" is used if the Python frame - filename is NULL */ - PyObject *filename; - unsigned int lineno; -} frame_t; -#ifdef _MSC_VER -#pragma pack(pop) -#endif - - -typedef struct { - Py_uhash_t hash; - /* Number of frames stored */ - uint16_t nframe; - /* Total number of frames the traceback had */ - uint16_t total_nframe; - frame_t frames[1]; -} traceback_t; +typedef struct tracemalloc_frame frame_t; +typedef struct tracemalloc_traceback traceback_t; #define TRACEBACK_SIZE(NFRAME) \ (sizeof(traceback_t) + sizeof(frame_t) * (NFRAME - 1)) @@ -94,7 +63,8 @@ typedef struct { static const unsigned long MAX_NFRAME = Py_MIN(UINT16_MAX, ((SIZE_MAX - sizeof(traceback_t)) / sizeof(frame_t) + 1)); -static traceback_t tracemalloc_empty_traceback; +#define tracemalloc_empty_traceback _PyRuntime.tracemalloc.empty_traceback + /* Trace of a memory block */ typedef struct { @@ -106,35 +76,13 @@ typedef struct { } trace_t; -/* Size in bytes of currently traced memory. - Protected by TABLES_LOCK(). */ -static size_t tracemalloc_traced_memory = 0; - -/* Peak size in bytes of traced memory. - Protected by TABLES_LOCK(). */ -static size_t tracemalloc_peak_traced_memory = 0; - -/* Hash table used as a set to intern filenames: - PyObject* => PyObject*. - Protected by the GIL */ -static _Py_hashtable_t *tracemalloc_filenames = NULL; - -/* Buffer to store a new traceback in traceback_new(). - Protected by the GIL. */ -static traceback_t *tracemalloc_traceback = NULL; - -/* Hash table used as a set to intern tracebacks: - traceback_t* => traceback_t* - Protected by the GIL */ -static _Py_hashtable_t *tracemalloc_tracebacks = NULL; - -/* pointer (void*) => trace (trace_t*). - Protected by TABLES_LOCK(). */ -static _Py_hashtable_t *tracemalloc_traces = NULL; - -/* domain (unsigned int) => traces (_Py_hashtable_t). - Protected by TABLES_LOCK(). */ -static _Py_hashtable_t *tracemalloc_domains = NULL; +#define tracemalloc_traced_memory _PyRuntime.tracemalloc.traced_memory +#define tracemalloc_peak_traced_memory _PyRuntime.tracemalloc.peak_traced_memory +#define tracemalloc_filenames _PyRuntime.tracemalloc.filenames +#define tracemalloc_traceback _PyRuntime.tracemalloc.traceback +#define tracemalloc_tracebacks _PyRuntime.tracemalloc.tracebacks +#define tracemalloc_traces _PyRuntime.tracemalloc.traces +#define tracemalloc_domains _PyRuntime.tracemalloc.domains #ifdef TRACE_DEBUG @@ -155,7 +103,7 @@ tracemalloc_error(const char *format, ...) 
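(Aside, not part of the patch: the _tracemalloc.c hunks above replace the file-local static globals with #define aliases into _PyRuntime.tracemalloc. The real struct definition lives in an internal header that is not included in this diff; the sketch below is only a guess at its shape, with every field name taken from the _PyRuntime.tracemalloc.* aliases used in these hunks and every type name other than those an assumption:)

/* Hypothetical shape of the per-runtime tracemalloc state (sketch only). */
struct _tracemalloc_runtime_state_sketch {
    struct _PyTraceMalloc_Config config;      /* type name assumed */
    struct {
        PyMemAllocatorEx mem;
        PyMemAllocatorEx raw;
        PyMemAllocatorEx obj;
    } allocators;                             /* members from the removed static struct */
    PyThread_type_lock tables_lock;
    size_t traced_memory;
    size_t peak_traced_memory;
    _Py_hashtable_t *filenames;
    struct tracemalloc_traceback *traceback;  /* buffer for traceback_new() */
    _Py_hashtable_t *tracebacks;
    _Py_hashtable_t *traces;
    _Py_hashtable_t *domains;
    struct tracemalloc_traceback empty_traceback;
    Py_tss_t reentrant_key;
};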
#if defined(TRACE_RAW_MALLOC) #define REENTRANT_THREADLOCAL -static Py_tss_t tracemalloc_reentrant_key = Py_tss_NEEDS_INIT; +#define tracemalloc_reentrant_key _PyRuntime.tracemalloc.reentrant_key /* Any non-NULL pointer can be used */ #define REENTRANT Py_True @@ -347,8 +295,8 @@ tracemalloc_get_frame(_PyInterpreterFrame *pyframe, frame_t *frame) else { /* tracemalloc_filenames is responsible to keep a reference to the filename */ - Py_INCREF(filename); - if (_Py_hashtable_set(tracemalloc_filenames, filename, NULL) < 0) { + if (_Py_hashtable_set(tracemalloc_filenames, Py_NewRef(filename), + NULL) < 0) { Py_DECREF(filename); #ifdef TRACE_DEBUG tracemalloc_error("failed to intern the filename"); @@ -407,7 +355,7 @@ traceback_get_frames(traceback_t *traceback) if (pyframe == NULL) { break; } - if (traceback->nframe < _Py_tracemalloc_config.max_nframe) { + if (traceback->nframe < tracemalloc_config.max_nframe) { tracemalloc_get_frame(pyframe, &traceback->frames[traceback->nframe]); assert(traceback->frames[traceback->nframe].filename != NULL); traceback->nframe++; @@ -505,7 +453,7 @@ tracemalloc_get_traces_table(unsigned int domain) static void tracemalloc_remove_trace(unsigned int domain, uintptr_t ptr) { - assert(_Py_tracemalloc_config.tracing); + assert(tracemalloc_config.tracing); _Py_hashtable_t *traces = tracemalloc_get_traces_table(domain); if (!traces) { @@ -529,7 +477,7 @@ static int tracemalloc_add_trace(unsigned int domain, uintptr_t ptr, size_t size) { - assert(_Py_tracemalloc_config.tracing); + assert(tracemalloc_config.tracing); traceback_t *traceback = traceback_new(); if (traceback == NULL) { @@ -863,13 +811,13 @@ tracemalloc_clear_traces(void) static int tracemalloc_init(void) { - if (_Py_tracemalloc_config.initialized == TRACEMALLOC_FINALIZED) { + if (tracemalloc_config.initialized == TRACEMALLOC_FINALIZED) { PyErr_SetString(PyExc_RuntimeError, "the tracemalloc module has been unloaded"); return -1; } - if (_Py_tracemalloc_config.initialized == TRACEMALLOC_INITIALIZED) + if (tracemalloc_config.initialized == TRACEMALLOC_INITIALIZED) return 0; PyMem_GetAllocator(PYMEM_DOMAIN_RAW, &allocators.raw); @@ -919,7 +867,7 @@ tracemalloc_init(void) tracemalloc_empty_traceback.frames[0].lineno = 0; tracemalloc_empty_traceback.hash = traceback_hash(&tracemalloc_empty_traceback); - _Py_tracemalloc_config.initialized = TRACEMALLOC_INITIALIZED; + tracemalloc_config.initialized = TRACEMALLOC_INITIALIZED; return 0; } @@ -927,9 +875,9 @@ tracemalloc_init(void) static void tracemalloc_deinit(void) { - if (_Py_tracemalloc_config.initialized != TRACEMALLOC_INITIALIZED) + if (tracemalloc_config.initialized != TRACEMALLOC_INITIALIZED) return; - _Py_tracemalloc_config.initialized = TRACEMALLOC_FINALIZED; + tracemalloc_config.initialized = TRACEMALLOC_FINALIZED; tracemalloc_stop(); @@ -969,12 +917,12 @@ tracemalloc_start(int max_nframe) return -1; } - if (_Py_tracemalloc_config.tracing) { + if (tracemalloc_config.tracing) { /* hook already installed: do nothing */ return 0; } - _Py_tracemalloc_config.max_nframe = max_nframe; + tracemalloc_config.max_nframe = max_nframe; /* allocate a buffer to store a new traceback */ size = TRACEBACK_SIZE(max_nframe); @@ -1010,7 +958,7 @@ tracemalloc_start(int max_nframe) PyMem_SetAllocator(PYMEM_DOMAIN_OBJ, &alloc); /* everything is ready: start tracing Python memory allocations */ - _Py_tracemalloc_config.tracing = 1; + tracemalloc_config.tracing = 1; return 0; } @@ -1019,11 +967,11 @@ tracemalloc_start(int max_nframe) static void tracemalloc_stop(void) { - if 
(!_Py_tracemalloc_config.tracing) + if (!tracemalloc_config.tracing) return; /* stop tracing Python memory allocations */ - _Py_tracemalloc_config.tracing = 0; + tracemalloc_config.tracing = 0; /* unregister the hook on memory allocators */ #ifdef TRACE_RAW_MALLOC @@ -1051,7 +999,7 @@ static PyObject * _tracemalloc_is_tracing_impl(PyObject *module) /*[clinic end generated code: output=2d763b42601cd3ef input=af104b0a00192f63]*/ { - return PyBool_FromLong(_Py_tracemalloc_config.tracing); + return PyBool_FromLong(tracemalloc_config.tracing); } @@ -1065,7 +1013,7 @@ static PyObject * _tracemalloc_clear_traces_impl(PyObject *module) /*[clinic end generated code: output=a86080ee41b84197 input=0dab5b6c785183a5]*/ { - if (!_Py_tracemalloc_config.tracing) + if (!tracemalloc_config.tracing) Py_RETURN_NONE; set_reentrant(1); @@ -1085,8 +1033,7 @@ frame_to_pyobject(frame_t *frame) if (frame_obj == NULL) return NULL; - Py_INCREF(frame->filename); - PyTuple_SET_ITEM(frame_obj, 0, frame->filename); + PyTuple_SET_ITEM(frame_obj, 0, Py_NewRef(frame->filename)); lineno_obj = PyLong_FromUnsignedLong(frame->lineno); if (lineno_obj == NULL) { @@ -1107,8 +1054,7 @@ traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table) if (intern_table != NULL) { frames = _Py_hashtable_get(intern_table, (const void *)traceback); if (frames) { - Py_INCREF(frames); - return frames; + return Py_NewRef(frames); } } @@ -1347,7 +1293,7 @@ _tracemalloc__get_traces_impl(PyObject *module) if (get_traces.list == NULL) goto error; - if (!_Py_tracemalloc_config.tracing) + if (!tracemalloc_config.tracing) return get_traces.list; /* the traceback hash table is used temporarily to intern traceback tuple @@ -1420,7 +1366,7 @@ static traceback_t* tracemalloc_get_traceback(unsigned int domain, uintptr_t ptr) { - if (!_Py_tracemalloc_config.tracing) + if (!tracemalloc_config.tracing) return NULL; trace_t *trace; @@ -1500,7 +1446,7 @@ _PyMem_DumpTraceback(int fd, const void *ptr) traceback_t *traceback; int i; - if (!_Py_tracemalloc_config.tracing) { + if (!tracemalloc_config.tracing) { PUTS(fd, "Enable tracemalloc to get the memory block " "allocation traceback\n\n"); return; @@ -1574,7 +1520,7 @@ static PyObject * _tracemalloc_get_traceback_limit_impl(PyObject *module) /*[clinic end generated code: output=d556d9306ba95567 input=da3cd977fc68ae3b]*/ { - return PyLong_FromLong(_Py_tracemalloc_config.max_nframe); + return PyLong_FromLong(tracemalloc_config.max_nframe); } @@ -1632,7 +1578,7 @@ _tracemalloc_get_traced_memory_impl(PyObject *module) { Py_ssize_t size, peak_size; - if (!_Py_tracemalloc_config.tracing) + if (!tracemalloc_config.tracing) return Py_BuildValue("ii", 0, 0); TABLES_LOCK(); @@ -1656,7 +1602,7 @@ static PyObject * _tracemalloc_reset_peak_impl(PyObject *module) /*[clinic end generated code: output=140c2870f691dbb2 input=18afd0635066e9ce]*/ { - if (!_Py_tracemalloc_config.tracing) { + if (!tracemalloc_config.tracing) { Py_RETURN_NONE; } @@ -1737,7 +1683,7 @@ PyTraceMalloc_Track(unsigned int domain, uintptr_t ptr, int res; PyGILState_STATE gil_state; - if (!_Py_tracemalloc_config.tracing) { + if (!tracemalloc_config.tracing) { /* tracemalloc is not tracing: do nothing */ return -2; } @@ -1756,7 +1702,7 @@ PyTraceMalloc_Track(unsigned int domain, uintptr_t ptr, int PyTraceMalloc_Untrack(unsigned int domain, uintptr_t ptr) { - if (!_Py_tracemalloc_config.tracing) { + if (!tracemalloc_config.tracing) { /* tracemalloc is not tracing: do nothing */ return -2; } @@ -1779,7 +1725,7 @@ 
_PyTraceMalloc_NewReference(PyObject *op) { assert(PyGILState_Check()); - if (!_Py_tracemalloc_config.tracing) { + if (!tracemalloc_config.tracing) { /* tracemalloc is not tracing: do nothing */ return -1; } diff --git a/Modules/_typingmodule.c b/Modules/_typingmodule.c index 8b6faa646d6187..262dddb63fd5fe 100644 --- a/Modules/_typingmodule.c +++ b/Modules/_typingmodule.c @@ -23,8 +23,7 @@ static PyObject * _typing__idfunc(PyObject *module, PyObject *x) /*[clinic end generated code: output=63c38be4a6ec5f2c input=49f17284b43de451]*/ { - Py_INCREF(x); - return x; + return Py_NewRef(x); } diff --git a/Modules/_winapi.c b/Modules/_winapi.c index 71e74d155cdc00..f4d982b15d402a 100644 --- a/Modules/_winapi.c +++ b/Modules/_winapi.c @@ -291,8 +291,7 @@ _winapi_Overlapped_getbuffer_impl(OverlappedObject *self) return NULL; } res = self->read_buffer ? self->read_buffer : Py_None; - Py_INCREF(res); - return res; + return Py_NewRef(res); } /*[clinic input] @@ -405,13 +404,13 @@ _winapi_CloseHandle_impl(PyObject *module, HANDLE handle) _winapi.ConnectNamedPipe handle: HANDLE - overlapped as use_overlapped: bool(accept={int}) = False + overlapped as use_overlapped: bool = False [clinic start generated code]*/ static PyObject * _winapi_ConnectNamedPipe_impl(PyObject *module, HANDLE handle, int use_overlapped) -/*[clinic end generated code: output=335a0e7086800671 input=34f937c1c86e5e68]*/ +/*[clinic end generated code: output=335a0e7086800671 input=a80e56e8bd370e31]*/ { BOOL success; OverlappedObject *overlapped = NULL; @@ -1394,6 +1393,30 @@ _winapi_MapViewOfFile_impl(PyObject *module, HANDLE file_map, return address; } +/*[clinic input] +_winapi.UnmapViewOfFile + + address: LPCVOID + / +[clinic start generated code]*/ + +static PyObject * +_winapi_UnmapViewOfFile_impl(PyObject *module, LPCVOID address) +/*[clinic end generated code: output=4f7e18ac75d19744 input=8c4b6119ad9288a3]*/ +{ + BOOL success; + + Py_BEGIN_ALLOW_THREADS + success = UnmapViewOfFile(address); + Py_END_ALLOW_THREADS + + if (!success) { + return PyErr_SetFromWindowsErr(0); + } + + Py_RETURN_NONE; +} + /*[clinic input] _winapi.OpenFileMapping -> HANDLE @@ -1553,13 +1576,13 @@ _winapi.ReadFile handle: HANDLE size: DWORD - overlapped as use_overlapped: bool(accept={int}) = False + overlapped as use_overlapped: bool = False [clinic start generated code]*/ static PyObject * _winapi_ReadFile_impl(PyObject *module, HANDLE handle, DWORD size, int use_overlapped) -/*[clinic end generated code: output=d3d5b44a8201b944 input=08c439d03a11aac5]*/ +/*[clinic end generated code: output=d3d5b44a8201b944 input=4f82f8e909ad91ad]*/ { DWORD nread; PyObject *buf; @@ -1839,13 +1862,13 @@ _winapi.WriteFile handle: HANDLE buffer: object - overlapped as use_overlapped: bool(accept={int}) = False + overlapped as use_overlapped: bool = False [clinic start generated code]*/ static PyObject * _winapi_WriteFile_impl(PyObject *module, HANDLE handle, PyObject *buffer, int use_overlapped) -/*[clinic end generated code: output=2ca80f6bf3fa92e3 input=11eae2a03aa32731]*/ +/*[clinic end generated code: output=2ca80f6bf3fa92e3 input=2badb008c8a2e2a0]*/ { Py_buffer _buf, *buf; DWORD len, written; @@ -2063,6 +2086,7 @@ static PyMethodDef winapi_functions[] = { _WINAPI_READFILE_METHODDEF _WINAPI_SETNAMEDPIPEHANDLESTATE_METHODDEF _WINAPI_TERMINATEPROCESS_METHODDEF + _WINAPI_UNMAPVIEWOFFILE_METHODDEF _WINAPI_VIRTUALQUERYSIZE_METHODDEF _WINAPI_WAITNAMEDPIPE_METHODDEF _WINAPI_WAITFORMULTIPLEOBJECTS_METHODDEF diff --git a/Modules/_xxsubinterpretersmodule.c 
b/Modules/_xxsubinterpretersmodule.c index 9d979ef113e686..0892fa3a9595e8 100644 --- a/Modules/_xxsubinterpretersmodule.c +++ b/Modules/_xxsubinterpretersmodule.c @@ -6,11 +6,15 @@ #endif #include "Python.h" +// XXX This module should not rely on internal API. #include "pycore_frame.h" #include "pycore_pystate.h" // _PyThreadState_GET() #include "pycore_interpreteridobject.h" +#define MODULE_NAME "_xxsubinterpreters" + + static char * _copy_raw_string(PyObject *strobj) { @@ -28,13 +32,186 @@ _copy_raw_string(PyObject *strobj) } static PyInterpreterState * -_get_current(void) +_get_current_interp(void) { // PyInterpreterState_Get() aborts if lookup fails, so don't need // to check the result for NULL. return PyInterpreterState_Get(); } +static PyObject * +_get_current_module(void) +{ + // We ensured it was imported in _run_script(). + PyObject *name = PyUnicode_FromString(MODULE_NAME); + if (name == NULL) { + return NULL; + } + PyObject *mod = PyImport_GetModule(name); + Py_DECREF(name); + if (mod == NULL) { + return NULL; + } + assert(mod != Py_None); + return mod; +} + +static PyObject * +get_module_from_owned_type(PyTypeObject *cls) +{ + assert(cls != NULL); + return _get_current_module(); + // XXX Use the more efficient API now that we use heap types: + //return PyType_GetModule(cls); +} + +static struct PyModuleDef moduledef; + +static PyObject * +get_module_from_type(PyTypeObject *cls) +{ + assert(cls != NULL); + return _get_current_module(); + // XXX Use the more efficient API now that we use heap types: + //return PyType_GetModuleByDef(cls, &moduledef); +} + +static PyObject * +add_new_exception(PyObject *mod, const char *name, PyObject *base) +{ + assert(!PyObject_HasAttrString(mod, name)); + PyObject *exctype = PyErr_NewException(name, base, NULL); + if (exctype == NULL) { + return NULL; + } + int res = PyModule_AddType(mod, (PyTypeObject *)exctype); + if (res < 0) { + Py_DECREF(exctype); + return NULL; + } + return exctype; +} + +#define ADD_NEW_EXCEPTION(MOD, NAME, BASE) \ + add_new_exception(MOD, MODULE_NAME "." Py_STRINGIFY(NAME), BASE) + +static PyTypeObject * +add_new_type(PyObject *mod, PyType_Spec *spec, crossinterpdatafunc shared) +{ + PyTypeObject *cls = (PyTypeObject *)PyType_FromMetaclass( + NULL, mod, spec, NULL); + if (cls == NULL) { + return NULL; + } + if (PyModule_AddType(mod, cls) < 0) { + Py_DECREF(cls); + return NULL; + } + if (shared != NULL) { + if (_PyCrossInterpreterData_RegisterClass(cls, shared)) { + Py_DECREF(cls); + return NULL; + } + } + return cls; +} + +static int +_release_xid_data(_PyCrossInterpreterData *data, int ignoreexc) +{ + PyObject *exctype, *excval, *exctb; + if (ignoreexc) { + PyErr_Fetch(&exctype, &excval, &exctb); + } + int res = _PyCrossInterpreterData_Release(data); + if (res < 0) { + // XXX Fix this! + /* The owning interpreter is already destroyed. + * Ideally, this shouldn't ever happen. When an interpreter is + * about to be destroyed, we should clear out all of its objects + * from every channel associated with that interpreter. + * For now we hack around that to resolve refleaks, by decref'ing + * the released object here, even if its the wrong interpreter. + * The owning interpreter has already been destroyed + * so we should be okay, especially since the currently + * shareable types are all very basic, with no GC. + * That said, it becomes much messier once interpreters + * no longer share a GIL, so this needs to be fixed before then. */ + _PyCrossInterpreterData_Clear(NULL, data); + if (ignoreexc) { + // XXX Emit a warning? 
+ PyErr_Clear(); + } + } + if (ignoreexc) { + PyErr_Restore(exctype, excval, exctb); + } + return res; +} + + +/* module state *************************************************************/ + +typedef struct { + PyTypeObject *ChannelIDType; + + /* interpreter exceptions */ + PyObject *RunFailedError; + + /* channel exceptions */ + PyObject *ChannelError; + PyObject *ChannelNotFoundError; + PyObject *ChannelClosedError; + PyObject *ChannelEmptyError; + PyObject *ChannelNotEmptyError; +} module_state; + +static inline module_state * +get_module_state(PyObject *mod) +{ + assert(mod != NULL); + module_state *state = PyModule_GetState(mod); + assert(state != NULL); + return state; +} + +static int +traverse_module_state(module_state *state, visitproc visit, void *arg) +{ + /* heap types */ + Py_VISIT(state->ChannelIDType); + + /* interpreter exceptions */ + Py_VISIT(state->RunFailedError); + + /* channel exceptions */ + Py_VISIT(state->ChannelError); + Py_VISIT(state->ChannelNotFoundError); + Py_VISIT(state->ChannelClosedError); + Py_VISIT(state->ChannelEmptyError); + Py_VISIT(state->ChannelNotEmptyError); + return 0; +} + +static int +clear_module_state(module_state *state) +{ + /* heap types */ + (void)_PyCrossInterpreterData_UnregisterClass(state->ChannelIDType); + Py_CLEAR(state->ChannelIDType); + + /* interpreter exceptions */ + Py_CLEAR(state->RunFailedError); + + /* channel exceptions */ + Py_CLEAR(state->ChannelError); + Py_CLEAR(state->ChannelNotFoundError); + Py_CLEAR(state->ChannelClosedError); + Py_CLEAR(state->ChannelEmptyError); + Py_CLEAR(state->ChannelNotEmptyError); + return 0; +} + /* data-sharing-specific code ***********************************************/ @@ -66,7 +243,7 @@ _sharednsitem_clear(struct _sharednsitem *item) PyMem_Free(item->name); item->name = NULL; } - _PyCrossInterpreterData_Release(&item->data); + (void)_release_xid_data(&item->data, 1); } static int @@ -121,8 +298,10 @@ _sharedns_free(_sharedns *shared) } static _sharedns * -_get_shared_ns(PyObject *shareable) +_get_shared_ns(PyObject *shareable, PyTypeObject *channelidtype, + int *needs_import) { + *needs_import = 0; if (shareable == NULL || shareable == Py_None) { return NULL; } @@ -144,6 +323,9 @@ _get_shared_ns(PyObject *shareable) if (_sharednsitem_init(&shared->items[i], key, value) != 0) { break; } + if (Py_TYPE(value) == channelidtype) { + *needs_import = 1; + } } if (PyErr_Occurred()) { _sharedns_free(shared); @@ -287,68 +469,97 @@ _sharedexception_apply(_sharedexception *exc, PyObject *wrapperclass) #define CHANNEL_BOTH 0 #define CHANNEL_RECV -1 -static PyObject *ChannelError; -static PyObject *ChannelNotFoundError; -static PyObject *ChannelClosedError; -static PyObject *ChannelEmptyError; -static PyObject *ChannelNotEmptyError; +/* channel errors */ + +#define ERR_CHANNEL_NOT_FOUND -2 +#define ERR_CHANNEL_CLOSED -3 +#define ERR_CHANNEL_INTERP_CLOSED -4 +#define ERR_CHANNEL_EMPTY -5 +#define ERR_CHANNEL_NOT_EMPTY -6 +#define ERR_CHANNEL_MUTEX_INIT -7 +#define ERR_CHANNELS_MUTEX_INIT -8 +#define ERR_NO_NEXT_CHANNEL_ID -9 static int -channel_exceptions_init(PyObject *ns) +channel_exceptions_init(PyObject *mod) { - // XXX Move the exceptions into per-module memory? - - // A channel-related operation failed. 
- ChannelError = PyErr_NewException("_xxsubinterpreters.ChannelError", - PyExc_RuntimeError, NULL); - if (ChannelError == NULL) { - return -1; - } - if (PyDict_SetItemString(ns, "ChannelError", ChannelError) != 0) { + module_state *state = get_module_state(mod); + if (state == NULL) { return -1; } - // An operation tried to use a channel that doesn't exist. - ChannelNotFoundError = PyErr_NewException( - "_xxsubinterpreters.ChannelNotFoundError", ChannelError, NULL); - if (ChannelNotFoundError == NULL) { - return -1; - } - if (PyDict_SetItemString(ns, "ChannelNotFoundError", ChannelNotFoundError) != 0) { - return -1; - } +#define ADD(NAME, BASE) \ + do { \ + assert(state->NAME == NULL); \ + state->NAME = ADD_NEW_EXCEPTION(mod, NAME, BASE); \ + if (state->NAME == NULL) { \ + return -1; \ + } \ + } while (0) + // A channel-related operation failed. + ADD(ChannelError, PyExc_RuntimeError); + // An operation tried to use a channel that doesn't exist. + ADD(ChannelNotFoundError, state->ChannelError); // An operation tried to use a closed channel. - ChannelClosedError = PyErr_NewException( - "_xxsubinterpreters.ChannelClosedError", ChannelError, NULL); - if (ChannelClosedError == NULL) { - return -1; - } - if (PyDict_SetItemString(ns, "ChannelClosedError", ChannelClosedError) != 0) { - return -1; - } - + ADD(ChannelClosedError, state->ChannelError); // An operation tried to pop from an empty channel. - ChannelEmptyError = PyErr_NewException( - "_xxsubinterpreters.ChannelEmptyError", ChannelError, NULL); - if (ChannelEmptyError == NULL) { - return -1; + ADD(ChannelEmptyError, state->ChannelError); + // An operation tried to close a non-empty channel. + ADD(ChannelNotEmptyError, state->ChannelError); +#undef ADD + + return 0; +} + +static int +handle_channel_error(int err, PyObject *mod, int64_t cid) +{ + if (err == 0) { + assert(!PyErr_Occurred()); + return 0; } - if (PyDict_SetItemString(ns, "ChannelEmptyError", ChannelEmptyError) != 0) { - return -1; + assert(err < 0); + module_state *state = get_module_state(mod); + assert(state != NULL); + if (err == ERR_CHANNEL_NOT_FOUND) { + PyErr_Format(state->ChannelNotFoundError, + "channel %" PRId64 " not found", cid); + } + else if (err == ERR_CHANNEL_CLOSED) { + PyErr_Format(state->ChannelClosedError, + "channel %" PRId64 " is closed", cid); + } + else if (err == ERR_CHANNEL_INTERP_CLOSED) { + PyErr_Format(state->ChannelClosedError, + "channel %" PRId64 " is already closed", cid); + } + else if (err == ERR_CHANNEL_EMPTY) { + PyErr_Format(state->ChannelEmptyError, + "channel %" PRId64 " is empty", cid); + } + else if (err == ERR_CHANNEL_NOT_EMPTY) { + PyErr_Format(state->ChannelNotEmptyError, + "channel %" PRId64 " may not be closed " + "if not empty (try force=True)", + cid); + } + else if (err == ERR_CHANNEL_MUTEX_INIT) { + PyErr_SetString(state->ChannelError, + "can't initialize mutex for new channel"); } - - // An operation tried to close a non-empty channel. 
- ChannelNotEmptyError = PyErr_NewException( - "_xxsubinterpreters.ChannelNotEmptyError", ChannelError, NULL); - if (ChannelNotEmptyError == NULL) { - return -1; + else if (err == ERR_CHANNELS_MUTEX_INIT) { + PyErr_SetString(state->ChannelError, + "can't initialize mutex for channel management"); } - if (PyDict_SetItemString(ns, "ChannelNotEmptyError", ChannelNotEmptyError) != 0) { - return -1; + else if (err == ERR_NO_NEXT_CHANNEL_ID) { + PyErr_SetString(state->ChannelError, + "failed to get a channel ID"); } - - return 0; + else { + assert(PyErr_Occurred()); + } + return 1; } /* the channel queue */ @@ -377,7 +588,7 @@ static void _channelitem_clear(_channelitem *item) { if (item->data != NULL) { - _PyCrossInterpreterData_Release(item->data); + (void)_release_xid_data(item->data, 1); PyMem_Free(item->data); item->data = NULL; } @@ -621,8 +832,7 @@ _channelends_associate(_channelends *ends, int64_t interp, int send) interp, &prev); if (end != NULL) { if (!end->open) { - PyErr_SetString(ChannelClosedError, "channel already closed"); - return -1; + return ERR_CHANNEL_CLOSED; } // already associated return 0; @@ -721,19 +931,13 @@ typedef struct _channel { } _PyChannelState; static _PyChannelState * -_channel_new(void) +_channel_new(PyThread_type_lock mutex) { _PyChannelState *chan = PyMem_NEW(_PyChannelState, 1); if (chan == NULL) { return NULL; } - chan->mutex = PyThread_allocate_lock(); - if (chan->mutex == NULL) { - PyMem_Free(chan); - PyErr_SetString(ChannelError, - "can't initialize mutex for new channel"); - return NULL; - } + chan->mutex = mutex; chan->queue = _channelqueue_new(); if (chan->queue == NULL) { PyMem_Free(chan); @@ -771,10 +975,11 @@ _channel_add(_PyChannelState *chan, int64_t interp, PyThread_acquire_lock(chan->mutex, WAIT_LOCK); if (!chan->open) { - PyErr_SetString(ChannelClosedError, "channel closed"); + res = ERR_CHANNEL_CLOSED; goto done; } if (_channelends_associate(chan->ends, interp, 1) != 0) { + res = ERR_CHANNEL_INTERP_CLOSED; goto done; } @@ -788,31 +993,34 @@ _channel_add(_PyChannelState *chan, int64_t interp, return res; } -static _PyCrossInterpreterData * -_channel_next(_PyChannelState *chan, int64_t interp) +static int +_channel_next(_PyChannelState *chan, int64_t interp, + _PyCrossInterpreterData **res) { - _PyCrossInterpreterData *data = NULL; + int err = 0; PyThread_acquire_lock(chan->mutex, WAIT_LOCK); if (!chan->open) { - PyErr_SetString(ChannelClosedError, "channel closed"); + err = ERR_CHANNEL_CLOSED; goto done; } if (_channelends_associate(chan->ends, interp, 0) != 0) { + err = ERR_CHANNEL_INTERP_CLOSED; goto done; } - data = _channelqueue_get(chan->queue); + _PyCrossInterpreterData *data = _channelqueue_get(chan->queue); if (data == NULL && !PyErr_Occurred() && chan->closing != NULL) { chan->open = 0; } + *res = data; done: PyThread_release_lock(chan->mutex); if (chan->queue->count == 0) { _channel_finish_closing(chan); } - return data; + return err; } static int @@ -822,7 +1030,7 @@ _channel_close_interpreter(_PyChannelState *chan, int64_t interp, int end) int res = -1; if (!chan->open) { - PyErr_SetString(ChannelClosedError, "channel already closed"); + res = ERR_CHANNEL_CLOSED; goto done; } @@ -844,13 +1052,12 @@ _channel_close_all(_PyChannelState *chan, int end, int force) PyThread_acquire_lock(chan->mutex, WAIT_LOCK); if (!chan->open) { - PyErr_SetString(ChannelClosedError, "channel already closed"); + res = ERR_CHANNEL_CLOSED; goto done; } if (!force && chan->queue->count > 0) { - PyErr_SetString(ChannelNotEmptyError, - "may not be closed if 
not empty (try force=True)"); + res = ERR_CHANNEL_NOT_EMPTY; goto done; } @@ -935,21 +1142,24 @@ typedef struct _channels { int64_t next_id; } _channels; -static int -_channels_init(_channels *channels) +static void +_channels_init(_channels *channels, PyThread_type_lock mutex) { - if (channels->mutex == NULL) { - channels->mutex = PyThread_allocate_lock(); - if (channels->mutex == NULL) { - PyErr_SetString(ChannelError, - "can't initialize mutex for channel management"); - return -1; - } - } + channels->mutex = mutex; channels->head = NULL; channels->numopen = 0; channels->next_id = 0; - return 0; +} + +static void +_channels_fini(_channels *channels) +{ + assert(channels->numopen == 0); + assert(channels->head == NULL); + if (channels->mutex != NULL) { + PyThread_free_lock(channels->mutex); + channels->mutex = NULL; + } } static int64_t @@ -958,17 +1168,17 @@ _channels_next_id(_channels *channels) // needs lock int64_t id = channels->next_id; if (id < 0) { /* overflow */ - PyErr_SetString(ChannelError, - "failed to get a channel ID"); return -1; } channels->next_id += 1; return id; } -static _PyChannelState * -_channels_lookup(_channels *channels, int64_t id, PyThread_type_lock *pmutex) +static int +_channels_lookup(_channels *channels, int64_t id, PyThread_type_lock *pmutex, + _PyChannelState **res) { + int err = -1; _PyChannelState *chan = NULL; PyThread_acquire_lock(channels->mutex, WAIT_LOCK); if (pmutex != NULL) { @@ -977,11 +1187,11 @@ _channels_lookup(_channels *channels, int64_t id, PyThread_type_lock *pmutex) _channelref *ref = _channelref_find(channels->head, id, NULL); if (ref == NULL) { - PyErr_Format(ChannelNotFoundError, "channel %" PRId64 " not found", id); + err = ERR_CHANNEL_NOT_FOUND; goto done; } if (ref->chan == NULL || !ref->chan->open) { - PyErr_Format(ChannelClosedError, "channel %" PRId64 " closed", id); + err = ERR_CHANNEL_CLOSED; goto done; } @@ -991,11 +1201,14 @@ _channels_lookup(_channels *channels, int64_t id, PyThread_type_lock *pmutex) } chan = ref->chan; + err = 0; + done: if (pmutex == NULL || *pmutex == NULL) { PyThread_release_lock(channels->mutex); } - return chan; + *res = chan; + return err; } static int64_t @@ -1007,6 +1220,7 @@ _channels_add(_channels *channels, _PyChannelState *chan) // Create a new ref. int64_t id = _channels_next_id(channels); if (id < 0) { + cid = ERR_NO_NEXT_CHANNEL_ID; goto done; } _channelref *ref = _channelref_new(id, chan); @@ -1041,31 +1255,32 @@ _channels_close(_channels *channels, int64_t cid, _PyChannelState **pchan, _channelref *ref = _channelref_find(channels->head, cid, NULL); if (ref == NULL) { - PyErr_Format(ChannelNotFoundError, "channel %" PRId64 " not found", cid); + res = ERR_CHANNEL_NOT_FOUND; goto done; } if (ref->chan == NULL) { - PyErr_Format(ChannelClosedError, "channel %" PRId64 " closed", cid); + res = ERR_CHANNEL_CLOSED; goto done; } else if (!force && end == CHANNEL_SEND && ref->chan->closing != NULL) { - PyErr_Format(ChannelClosedError, "channel %" PRId64 " closed", cid); + res = ERR_CHANNEL_CLOSED; goto done; } else { - if (_channel_close_all(ref->chan, end, force) != 0) { - if (end == CHANNEL_SEND && - PyErr_ExceptionMatches(ChannelNotEmptyError)) { + int err = _channel_close_all(ref->chan, end, force); + if (err != 0) { + if (end == CHANNEL_SEND && err == ERR_CHANNEL_NOT_EMPTY) { if (ref->chan->closing != NULL) { - PyErr_Format(ChannelClosedError, - "channel %" PRId64 " closed", cid); + res = ERR_CHANNEL_CLOSED; goto done; } // Mark the channel as closing and return. 
The channel // will be cleaned up in _channel_next(). PyErr_Clear(); - if (_channel_set_closing(ref, channels->mutex) != 0) { + int err = _channel_set_closing(ref, channels->mutex); + if (err != 0) { + res = err; goto done; } if (pchan != NULL) { @@ -1073,6 +1288,9 @@ _channels_close(_channels *channels, int64_t cid, _PyChannelState **pchan, } res = 0; } + else { + res = err; + } goto done; } if (pchan != NULL) { @@ -1121,7 +1339,7 @@ _channels_remove(_channels *channels, int64_t id, _PyChannelState **pchan) _channelref *prev = NULL; _channelref *ref = _channelref_find(channels->head, id, &prev); if (ref == NULL) { - PyErr_Format(ChannelNotFoundError, "channel %" PRId64 " not found", id); + res = ERR_CHANNEL_NOT_FOUND; goto done; } @@ -1141,7 +1359,7 @@ _channels_add_id_object(_channels *channels, int64_t id) _channelref *ref = _channelref_find(channels->head, id, NULL); if (ref == NULL) { - PyErr_Format(ChannelNotFoundError, "channel %" PRId64 " not found", id); + res = ERR_CHANNEL_NOT_FOUND; goto done; } ref->objcount += 1; @@ -1215,7 +1433,7 @@ _channel_set_closing(struct _channelref *ref, PyThread_type_lock mutex) { int res = -1; PyThread_acquire_lock(chan->mutex, WAIT_LOCK); if (chan->closing != NULL) { - PyErr_SetString(ChannelClosedError, "channel closed"); + res = ERR_CHANNEL_CLOSED; goto done; } chan->closing = PyMem_NEW(struct _channel_closing, 1); @@ -1258,14 +1476,18 @@ _channel_finish_closing(struct _channel *chan) { static int64_t _channel_create(_channels *channels) { - _PyChannelState *chan = _channel_new(); + PyThread_type_lock mutex = PyThread_allocate_lock(); + if (mutex == NULL) { + return ERR_CHANNEL_MUTEX_INIT; + } + _PyChannelState *chan = _channel_new(mutex); if (chan == NULL) { + PyThread_free_lock(mutex); return -1; } int64_t id = _channels_add(channels, chan); if (id < 0) { _channel_free(chan); - return -1; } return id; } @@ -1274,8 +1496,9 @@ static int _channel_destroy(_channels *channels, int64_t id) { _PyChannelState *chan = NULL; - if (_channels_remove(channels, id, &chan) != 0) { - return -1; + int err = _channels_remove(channels, id, &chan); + if (err != 0) { + return err; } if (chan != NULL) { _channel_free(chan); @@ -1286,23 +1509,24 @@ _channel_destroy(_channels *channels, int64_t id) static int _channel_send(_channels *channels, int64_t id, PyObject *obj) { - PyInterpreterState *interp = _get_current(); + PyInterpreterState *interp = _get_current_interp(); if (interp == NULL) { return -1; } // Look up the channel. PyThread_type_lock mutex = NULL; - _PyChannelState *chan = _channels_lookup(channels, id, &mutex); - if (chan == NULL) { - return -1; + _PyChannelState *chan = NULL; + int err = _channels_lookup(channels, id, &mutex, &chan); + if (err != 0) { + return err; } + assert(chan != NULL); // Past this point we are responsible for releasing the mutex. if (chan->closing != NULL) { - PyErr_Format(ChannelClosedError, "channel %" PRId64 " closed", id); PyThread_release_lock(mutex); - return -1; + return ERR_CHANNEL_CLOSED; } // Convert the object to cross-interpreter data. 
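The hunks above replace in-place PyErr_SetString()/PyErr_Format() calls with negative ERR_* codes that are only turned into exceptions at the module boundary (handle_channel_error). A minimal sketch of that split, using hypothetical names (MY_ERR_*, lookup_thing, handle_thing_error) rather than the module's actual helpers:

    #include <Python.h>
    #include <assert.h>
    #include <inttypes.h>

    /* Hypothetical error codes, mirroring the ERR_* style above. */
    #define MY_ERR_NOT_FOUND (-2)
    #define MY_ERR_CLOSED    (-3)

    /* Low-level helper: reports failure through a code, never sets PyErr. */
    static int
    lookup_thing(int64_t id)
    {
        if (id < 0) {
            return MY_ERR_NOT_FOUND;
        }
        if (id == 0) {
            return MY_ERR_CLOSED;
        }
        return 0;
    }

    /* Boundary helper: converts a code into an exception exactly once.
       Returns 1 if an exception is now set, 0 if there was no error. */
    static int
    handle_thing_error(int err, PyObject *exctype, int64_t id)
    {
        if (err == 0) {
            return 0;
        }
        if (err == MY_ERR_NOT_FOUND) {
            PyErr_Format(exctype, "thing %" PRId64 " not found", id);
        }
        else if (err == MY_ERR_CLOSED) {
            PyErr_Format(exctype, "thing %" PRId64 " closed", id);
        }
        else {
            assert(PyErr_Occurred());   /* a Python error was already set */
        }
        return 1;
    }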
@@ -1321,61 +1545,87 @@ _channel_send(_channels *channels, int64_t id, PyObject *obj) int res = _channel_add(chan, PyInterpreterState_GetID(interp), data); PyThread_release_lock(mutex); if (res != 0) { - _PyCrossInterpreterData_Release(data); + // We may chain an exception here: + (void)_release_xid_data(data, 0); PyMem_Free(data); - return -1; + return res; } return 0; } -static PyObject * -_channel_recv(_channels *channels, int64_t id) +static int +_channel_recv(_channels *channels, int64_t id, PyObject **res) { - PyInterpreterState *interp = _get_current(); + int err; + *res = NULL; + + PyInterpreterState *interp = _get_current_interp(); if (interp == NULL) { - return NULL; + // XXX Is this always an error? + if (PyErr_Occurred()) { + return -1; + } + return 0; } // Look up the channel. PyThread_type_lock mutex = NULL; - _PyChannelState *chan = _channels_lookup(channels, id, &mutex); - if (chan == NULL) { - return NULL; + _PyChannelState *chan = NULL; + err = _channels_lookup(channels, id, &mutex, &chan); + if (err != 0) { + return err; } + assert(chan != NULL); // Past this point we are responsible for releasing the mutex. // Pop off the next item from the channel. - _PyCrossInterpreterData *data = _channel_next(chan, PyInterpreterState_GetID(interp)); + _PyCrossInterpreterData *data = NULL; + err = _channel_next(chan, PyInterpreterState_GetID(interp), &data); PyThread_release_lock(mutex); - if (data == NULL) { - return NULL; + if (err != 0) { + return err; + } + else if (data == NULL) { + assert(!PyErr_Occurred()); + return 0; } // Convert the data back to an object. PyObject *obj = _PyCrossInterpreterData_NewObject(data); - _PyCrossInterpreterData_Release(data); - PyMem_Free(data); if (obj == NULL) { - return NULL; + assert(PyErr_Occurred()); + (void)_release_xid_data(data, 1); + PyMem_Free(data); + return -1; + } + int release_res = _release_xid_data(data, 0); + PyMem_Free(data); + if (release_res < 0) { + // The source interpreter has been destroyed already. + assert(PyErr_Occurred()); + Py_DECREF(obj); + return -1; } - return obj; + *res = obj; + return 0; } static int _channel_drop(_channels *channels, int64_t id, int send, int recv) { - PyInterpreterState *interp = _get_current(); + PyInterpreterState *interp = _get_current_interp(); if (interp == NULL) { return -1; } // Look up the channel. PyThread_type_lock mutex = NULL; - _PyChannelState *chan = _channels_lookup(channels, id, &mutex); - if (chan == NULL) { - return -1; + _PyChannelState *chan = NULL; + int err = _channels_lookup(channels, id, &mutex, &chan); + if (err != 0) { + return err; } // Past this point we are responsible for releasing the mutex. @@ -1395,12 +1645,13 @@ static int _channel_is_associated(_channels *channels, int64_t cid, int64_t interp, int send) { - _PyChannelState *chan = _channels_lookup(channels, cid, NULL); - if (chan == NULL) { - return -1; - } else if (send && chan->closing != NULL) { - PyErr_Format(ChannelClosedError, "channel %" PRId64 " closed", cid); - return -1; + _PyChannelState *chan = NULL; + int err = _channels_lookup(channels, cid, NULL, &chan); + if (err != 0) { + return err; + } + else if (send && chan->closing != NULL) { + return ERR_CHANNEL_CLOSED; } _channelend *end = _channelend_find(send ? 
chan->ends->send : chan->ends->recv, @@ -1411,8 +1662,6 @@ _channel_is_associated(_channels *channels, int64_t cid, int64_t interp, /* ChannelID class */ -static PyTypeObject ChannelIDtype; - typedef struct channelid { PyObject_HEAD int64_t id; @@ -1421,11 +1670,19 @@ typedef struct channelid { _channels *channels; } channelid; +struct channel_id_converter_data { + PyObject *module; + int64_t cid; +}; + static int channel_id_converter(PyObject *arg, void *ptr) { int64_t cid; - if (PyObject_TypeCheck(arg, &ChannelIDtype)) { + struct channel_id_converter_data *data = ptr; + module_state *state = get_module_state(data->module); + assert(state != NULL); + if (PyObject_TypeCheck(arg, state->ChannelIDType)) { cid = ((channelid *)arg)->id; } else if (PyIndex_Check(arg)) { @@ -1445,51 +1702,62 @@ channel_id_converter(PyObject *arg, void *ptr) Py_TYPE(arg)->tp_name); return 0; } - *(int64_t *)ptr = cid; + data->cid = cid; return 1; } -static channelid * +static int newchannelid(PyTypeObject *cls, int64_t cid, int end, _channels *channels, - int force, int resolve) + int force, int resolve, channelid **res) { + *res = NULL; + channelid *self = PyObject_New(channelid, cls); if (self == NULL) { - return NULL; + return -1; } self->id = cid; self->end = end; self->resolve = resolve; self->channels = channels; - if (_channels_add_id_object(channels, cid) != 0) { - if (force && PyErr_ExceptionMatches(ChannelNotFoundError)) { - PyErr_Clear(); + int err = _channels_add_id_object(channels, cid); + if (err != 0) { + if (force && err == ERR_CHANNEL_NOT_FOUND) { + assert(!PyErr_Occurred()); } else { Py_DECREF((PyObject *)self); - return NULL; + return err; } } - return self; + *res = self; + return 0; } static _channels * _global_channels(void); static PyObject * -channelid_new(PyTypeObject *cls, PyObject *args, PyObject *kwds) +_channelid_new(PyObject *mod, PyTypeObject *cls, + PyObject *args, PyObject *kwds) { static char *kwlist[] = {"id", "send", "recv", "force", "_resolve", NULL}; int64_t cid; + struct channel_id_converter_data cid_data = { + .module = mod, + }; int send = -1; int recv = -1; int force = 0; int resolve = 0; if (!PyArg_ParseTupleAndKeywords(args, kwds, "O&|$pppp:ChannelID.__new__", kwlist, - channel_id_converter, &cid, &send, &recv, &force, &resolve)) + channel_id_converter, &cid_data, + &send, &recv, &force, &resolve)) { return NULL; + } + cid = cid_data.cid; // Handle "send" and "recv". if (send == 0 && recv == 0) { @@ -1508,16 +1776,33 @@ channelid_new(PyTypeObject *cls, PyObject *args, PyObject *kwds) end = CHANNEL_RECV; } - return (PyObject *)newchannelid(cls, cid, end, _global_channels(), - force, resolve); + PyObject *id = NULL; + int err = newchannelid(cls, cid, end, _global_channels(), + force, resolve, + (channelid **)&id); + if (handle_channel_error(err, mod, cid)) { + assert(id == NULL); + return NULL; + } + assert(id != NULL); + return id; } static void -channelid_dealloc(PyObject *v) +channelid_dealloc(PyObject *self) { - int64_t cid = ((channelid *)v)->id; - _channels *channels = ((channelid *)v)->channels; - Py_TYPE(v)->tp_free(v); + int64_t cid = ((channelid *)self)->id; + _channels *channels = ((channelid *)self)->channels; + + PyTypeObject *tp = Py_TYPE(self); + tp->tp_free(self); + /* "Instances of heap-allocated types hold a reference to their type." 
+ * See: https://docs.python.org/3.11/howto/isolating-extensions.html#garbage-collection-protocol + * See: https://docs.python.org/3.11/c-api/typeobj.html#c.PyTypeObject.tp_traverse + */ + // XXX Why don't we implement Py_TPFLAGS_HAVE_GC, e.g. Py_tp_traverse, + // like we do for _abc._abc_data? + Py_DECREF(tp); _channels_drop_id_object(channels, cid); } @@ -1556,46 +1841,6 @@ channelid_int(PyObject *self) return PyLong_FromLongLong(cid->id); } -static PyNumberMethods channelid_as_number = { - 0, /* nb_add */ - 0, /* nb_subtract */ - 0, /* nb_multiply */ - 0, /* nb_remainder */ - 0, /* nb_divmod */ - 0, /* nb_power */ - 0, /* nb_negative */ - 0, /* nb_positive */ - 0, /* nb_absolute */ - 0, /* nb_bool */ - 0, /* nb_invert */ - 0, /* nb_lshift */ - 0, /* nb_rshift */ - 0, /* nb_and */ - 0, /* nb_xor */ - 0, /* nb_or */ - (unaryfunc)channelid_int, /* nb_int */ - 0, /* nb_reserved */ - 0, /* nb_float */ - - 0, /* nb_inplace_add */ - 0, /* nb_inplace_subtract */ - 0, /* nb_inplace_multiply */ - 0, /* nb_inplace_remainder */ - 0, /* nb_inplace_power */ - 0, /* nb_inplace_lshift */ - 0, /* nb_inplace_rshift */ - 0, /* nb_inplace_and */ - 0, /* nb_inplace_xor */ - 0, /* nb_inplace_or */ - - 0, /* nb_floor_divide */ - 0, /* nb_true_divide */ - 0, /* nb_inplace_floor_divide */ - 0, /* nb_inplace_true_divide */ - - (unaryfunc)channelid_int, /* nb_index */ -}; - static Py_hash_t channelid_hash(PyObject *self) { @@ -1612,17 +1857,28 @@ channelid_hash(PyObject *self) static PyObject * channelid_richcompare(PyObject *self, PyObject *other, int op) { + PyObject *res = NULL; if (op != Py_EQ && op != Py_NE) { Py_RETURN_NOTIMPLEMENTED; } - if (!PyObject_TypeCheck(self, &ChannelIDtype)) { - Py_RETURN_NOTIMPLEMENTED; + PyObject *mod = get_module_from_type(Py_TYPE(self)); + if (mod == NULL) { + return NULL; + } + module_state *state = get_module_state(mod); + if (state == NULL) { + goto done; + } + + if (!PyObject_TypeCheck(self, state->ChannelIDType)) { + res = Py_NewRef(Py_NotImplemented); + goto done; } channelid *cid = (channelid *)self; int equal; - if (PyObject_TypeCheck(other, &ChannelIDtype)) { + if (PyObject_TypeCheck(other, state->ChannelIDType)) { channelid *othercid = (channelid *)other; equal = (cid->end == othercid->end) && (cid->id == othercid->id); } @@ -1631,27 +1887,34 @@ channelid_richcompare(PyObject *self, PyObject *other, int op) int overflow; long long othercid = PyLong_AsLongLongAndOverflow(other, &overflow); if (othercid == -1 && PyErr_Occurred()) { - return NULL; + goto done; } equal = !overflow && (othercid >= 0) && (cid->id == othercid); } else if (PyNumber_Check(other)) { PyObject *pyid = PyLong_FromLongLong(cid->id); if (pyid == NULL) { - return NULL; + goto done; } - PyObject *res = PyObject_RichCompare(pyid, other, op); + res = PyObject_RichCompare(pyid, other, op); Py_DECREF(pyid); - return res; + goto done; } else { - Py_RETURN_NOTIMPLEMENTED; + res = Py_NewRef(Py_NotImplemented); + goto done; } if ((op == Py_EQ && equal) || (op == Py_NE && !equal)) { - Py_RETURN_TRUE; + res = Py_NewRef(Py_True); } - Py_RETURN_FALSE; + else { + res = Py_NewRef(Py_False); + } + +done: + Py_DECREF(mod); + return res; } static PyObject * @@ -1690,42 +1953,63 @@ static PyObject * _channelid_from_xid(_PyCrossInterpreterData *data) { struct _channelid_xid *xid = (struct _channelid_xid *)data->data; + + PyObject *mod = _get_current_module(); + if (mod == NULL) { + return NULL; + } + module_state *state = get_module_state(mod); + if (state == NULL) { + return NULL; + } + // Note that we do not preserve 
the "resolve" flag. - PyObject *cid = (PyObject *)newchannelid(&ChannelIDtype, xid->id, xid->end, - _global_channels(), 0, 0); + PyObject *cid = NULL; + int err = newchannelid(state->ChannelIDType, xid->id, xid->end, + _global_channels(), 0, 0, + (channelid **)&cid); + if (err != 0) { + assert(cid == NULL); + (void)handle_channel_error(err, mod, xid->id); + goto done; + } + assert(cid != NULL); if (xid->end == 0) { - return cid; + goto done; } if (!xid->resolve) { - return cid; + goto done; } /* Try returning a high-level channel end but fall back to the ID. */ PyObject *chan = _channel_from_cid(cid, xid->end); if (chan == NULL) { PyErr_Clear(); - return cid; + goto done; } Py_DECREF(cid); - return chan; + cid = chan; + +done: + Py_DECREF(mod); + return cid; } static int -_channelid_shared(PyObject *obj, _PyCrossInterpreterData *data) -{ - struct _channelid_xid *xid = PyMem_NEW(struct _channelid_xid, 1); - if (xid == NULL) { +_channelid_shared(PyThreadState *tstate, PyObject *obj, + _PyCrossInterpreterData *data) +{ + if (_PyCrossInterpreterData_InitWithSize( + data, tstate->interp, sizeof(struct _channelid_xid), obj, + _channelid_from_xid + ) < 0) + { return -1; } + struct _channelid_xid *xid = (struct _channelid_xid *)data->data; xid->id = ((channelid *)obj)->id; xid->end = ((channelid *)obj)->end; xid->resolve = ((channelid *)obj)->resolve; - - data->data = xid; - Py_INCREF(obj); - data->obj = obj; - data->new_object = _channelid_from_xid; - data->free = PyMem_Free; return 0; } @@ -1735,8 +2019,22 @@ channelid_end(PyObject *self, void *end) int force = 1; channelid *cid = (channelid *)self; if (end != NULL) { - return (PyObject *)newchannelid(Py_TYPE(self), cid->id, *(int *)end, - cid->channels, force, cid->resolve); + PyObject *id = NULL; + int err = newchannelid(Py_TYPE(self), cid->id, *(int *)end, + cid->channels, force, cid->resolve, + (channelid **)&id); + if (err != 0) { + assert(id == NULL); + PyObject *mod = get_module_from_type(Py_TYPE(self)); + if (mod == NULL) { + return NULL; + } + (void)handle_channel_error(err, mod, cid->id); + Py_DECREF(mod); + return NULL; + } + assert(id != NULL); + return id; } if (cid->end == CHANNEL_SEND) { @@ -1764,66 +2062,52 @@ static PyGetSetDef channelid_getsets[] = { PyDoc_STRVAR(channelid_doc, "A channel ID identifies a channel and may be used as an int."); -static PyTypeObject ChannelIDtype = { - PyVarObject_HEAD_INIT(&PyType_Type, 0) - "_xxsubinterpreters.ChannelID", /* tp_name */ - sizeof(channelid), /* tp_basicsize */ - 0, /* tp_itemsize */ - (destructor)channelid_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - (reprfunc)channelid_repr, /* tp_repr */ - &channelid_as_number, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - channelid_hash, /* tp_hash */ - 0, /* tp_call */ - (reprfunc)channelid_str, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - // Use Py_TPFLAGS_DISALLOW_INSTANTIATION so the type cannot be instantiated - // from Python code. We do this because there is a strong relationship - // between channel IDs and the channel lifecycle, so this limitation avoids - // related complications. Use the _channel_id() function instead. 
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE - | Py_TPFLAGS_DISALLOW_INSTANTIATION, /* tp_flags */ - channelid_doc, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - channelid_richcompare, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - 0, /* tp_methods */ - 0, /* tp_members */ - channelid_getsets, /* tp_getset */ +static PyType_Slot ChannelIDType_slots[] = { + {Py_tp_dealloc, (destructor)channelid_dealloc}, + {Py_tp_doc, (void *)channelid_doc}, + {Py_tp_repr, (reprfunc)channelid_repr}, + {Py_tp_str, (reprfunc)channelid_str}, + {Py_tp_hash, channelid_hash}, + {Py_tp_richcompare, channelid_richcompare}, + {Py_tp_getset, channelid_getsets}, + // number slots + {Py_nb_int, (unaryfunc)channelid_int}, + {Py_nb_index, (unaryfunc)channelid_int}, + {0, NULL}, }; +static PyType_Spec ChannelIDType_spec = { + .name = "_xxsubinterpreters.ChannelID", + .basicsize = sizeof(channelid), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | + Py_TPFLAGS_DISALLOW_INSTANTIATION | Py_TPFLAGS_IMMUTABLETYPE), + .slots = ChannelIDType_slots, +}; -/* interpreter-specific code ************************************************/ -static PyObject * RunFailedError = NULL; +/* interpreter-specific code ************************************************/ static int -interp_exceptions_init(PyObject *ns) +interp_exceptions_init(PyObject *mod) { - // XXX Move the exceptions into per-module memory? - - if (RunFailedError == NULL) { - // An uncaught exception came out of interp_run_string(). - RunFailedError = PyErr_NewException("_xxsubinterpreters.RunFailedError", - PyExc_RuntimeError, NULL); - if (RunFailedError == NULL) { - return -1; - } - if (PyDict_SetItemString(ns, "RunFailedError", RunFailedError) != 0) { - return -1; - } + module_state *state = get_module_state(mod); + if (state == NULL) { + return -1; } +#define ADD(NAME, BASE) \ + do { \ + assert(state->NAME == NULL); \ + state->NAME = ADD_NEW_EXCEPTION(mod, NAME, BASE); \ + if (state->NAME == NULL) { \ + return -1; \ + } \ + } while (0) + + // An uncaught exception came out of interp_run_string(). + ADD(RunFailedError, PyExc_RuntimeError); +#undef ADD + return 0; } @@ -1861,12 +2145,24 @@ _ensure_not_running(PyInterpreterState *interp) static int _run_script(PyInterpreterState *interp, const char *codestr, - _sharedns *shared, _sharedexception **exc) + _sharedns *shared, int needs_import, + _sharedexception **exc) { PyObject *exctype = NULL; PyObject *excval = NULL; PyObject *tb = NULL; + if (needs_import) { + // It might not have been imported yet in the current interpreter. + // However, it will (almost) always have been imported already + // in the main interpreter. 
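ChannelIDType above is rebuilt as a heap type from a PyType_Spec, so each loaded copy of the module owns its own type object and instances keep a strong reference to their type (released in tp_dealloc). A minimal sketch of that shape with a hypothetical Demo type and demo_* helpers; the real code additionally keeps the type in module state and registers it for cross-interpreter sharing:

    #include <Python.h>

    typedef struct {
        PyObject_HEAD
        int value;
    } demoobject;

    static void
    demo_dealloc(PyObject *self)
    {
        PyTypeObject *tp = Py_TYPE(self);
        tp->tp_free(self);
        /* Heap-type instances hold a strong reference to their type. */
        Py_DECREF(tp);
    }

    static PyType_Slot demo_slots[] = {
        {Py_tp_dealloc, demo_dealloc},
        {0, NULL},
    };

    static PyType_Spec demo_spec = {
        .name = "_demo.Demo",
        .basicsize = sizeof(demoobject),
        .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE
                  | Py_TPFLAGS_DISALLOW_INSTANTIATION | Py_TPFLAGS_IMMUTABLETYPE),
        .slots = demo_slots,
    };

    /* Called from the module's Py_mod_exec slot. */
    static int
    demo_add_type(PyObject *mod)
    {
        PyTypeObject *tp = (PyTypeObject *)PyType_FromModuleAndSpec(
                                                mod, &demo_spec, NULL);
        if (tp == NULL) {
            return -1;
        }
        int rc = PyModule_AddType(mod, tp);   /* adds its own reference */
        Py_DECREF(tp);
        return rc;
    }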
+ PyObject *mod = PyImport_ImportModule(MODULE_NAME); + if (mod == NULL) { + goto error; + } + Py_DECREF(mod); + } + PyObject *main_mod = _PyInterpreterState_GetMainModule(interp); if (main_mod == NULL) { goto error; @@ -1919,14 +2215,17 @@ _run_script(PyInterpreterState *interp, const char *codestr, } static int -_run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, - PyObject *shareables) +_run_script_in_interpreter(PyObject *mod, PyInterpreterState *interp, + const char *codestr, PyObject *shareables) { if (_ensure_not_running(interp) < 0) { return -1; } + module_state *state = get_module_state(mod); - _sharedns *shared = _get_shared_ns(shareables); + int needs_import = 0; + _sharedns *shared = _get_shared_ns(shareables, state->ChannelIDType, + &needs_import); if (shared == NULL && PyErr_Occurred()) { return -1; } @@ -1942,7 +2241,7 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, // Run the script. _sharedexception *exc = NULL; - int result = _run_script(interp, codestr, shared, &exc); + int result = _run_script(interp, codestr, shared, needs_import, &exc); // Switch back. if (save_tstate != NULL) { @@ -1951,7 +2250,8 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, // Propagate any exception out to the caller. if (exc != NULL) { - _sharedexception_apply(exc, RunFailedError); + assert(state != NULL); + _sharedexception_apply(exc, state->RunFailedError); _sharedexception_free(exc); } else if (result != 0) { @@ -1973,18 +2273,41 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, the data that we need to share between interpreters, so it cannot hold PyObject values. */ static struct globals { + int module_count; _channels channels; -} _globals = {{0}}; +} _globals = {0}; static int -_init_globals(void) +_globals_init(void) { - if (_channels_init(&_globals.channels) != 0) { - return -1; + // XXX This isn't thread-safe. + _globals.module_count++; + if (_globals.module_count > 1) { + // Already initialized. + return 0; + } + + assert(_globals.channels.mutex == NULL); + PyThread_type_lock mutex = PyThread_allocate_lock(); + if (mutex == NULL) { + return ERR_CHANNELS_MUTEX_INIT; } + _channels_init(&_globals.channels, mutex); return 0; } +static void +_globals_fini(void) +{ + // XXX This isn't thread-safe. + _globals.module_count--; + if (_globals.module_count > 0) { + return; + } + + _channels_fini(&_globals.channels); +} + static _channels * _global_channels(void) { return &_globals.channels; @@ -2053,7 +2376,7 @@ interp_destroy(PyObject *self, PyObject *args, PyObject *kwds) } // Ensure we don't try to destroy the current interpreter. - PyInterpreterState *current = _get_current(); + PyInterpreterState *current = _get_current_interp(); if (current == NULL) { return NULL; } @@ -2130,7 +2453,7 @@ Return a list containing the ID of every existing interpreter."); static PyObject * interp_get_current(PyObject *self, PyObject *Py_UNUSED(ignored)) { - PyInterpreterState *interp =_get_current(); + PyInterpreterState *interp =_get_current_interp(); if (interp == NULL) { return NULL; } @@ -2188,7 +2511,7 @@ interp_run_string(PyObject *self, PyObject *args, PyObject *kwds) } // Run the code in the interpreter. 
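_globals_init()/_globals_fini() above guard truly process-global state (the channels table and its lock) with a simple module load count, so the lock is allocated on the first load and freed when the last copy of the module goes away. A compact sketch of the same idea under hypothetical demo_* names, carrying the same thread-safety caveat as the original:

    #include <Python.h>

    /* Hypothetical process-global state shared by every copy of the module. */
    static struct {
        int module_count;
        PyThread_type_lock lock;
    } demo_globals = {0};

    static int
    demo_globals_init(void)
    {
        // XXX Like the original, the count itself is not lock-protected.
        if (++demo_globals.module_count > 1) {
            return 0;               /* already initialized by an earlier load */
        }
        demo_globals.lock = PyThread_allocate_lock();
        return (demo_globals.lock == NULL) ? -1 : 0;
    }

    static void
    demo_globals_fini(void)
    {
        if (--demo_globals.module_count > 0) {
            return;                 /* another module copy is still alive */
        }
        if (demo_globals.lock != NULL) {
            PyThread_free_lock(demo_globals.lock);
            demo_globals.lock = NULL;
        }
    }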
- if (_run_script_in_interpreter(interp, codestr, shared) != 0) { + if (_run_script_in_interpreter(self, interp, codestr, shared) != 0) { return NULL; } Py_RETURN_NONE; @@ -2260,16 +2583,26 @@ channel_create(PyObject *self, PyObject *Py_UNUSED(ignored)) { int64_t cid = _channel_create(&_globals.channels); if (cid < 0) { + (void)handle_channel_error(-1, self, cid); return NULL; } - PyObject *id = (PyObject *)newchannelid(&ChannelIDtype, cid, 0, - &_globals.channels, 0, 0); - if (id == NULL) { - if (_channel_destroy(&_globals.channels, cid) != 0) { + module_state *state = get_module_state(self); + if (state == NULL) { + return NULL; + } + PyObject *id = NULL; + int err = newchannelid(state->ChannelIDType, cid, 0, + &_globals.channels, 0, 0, + (channelid **)&id); + if (handle_channel_error(err, self, cid)) { + assert(id == NULL); + err = _channel_destroy(&_globals.channels, cid); + if (handle_channel_error(err, self, cid)) { // XXX issue a warning? } return NULL; } + assert(id != NULL); assert(((channelid *)id)->channels != NULL); return id; } @@ -2284,12 +2617,17 @@ channel_destroy(PyObject *self, PyObject *args, PyObject *kwds) { static char *kwlist[] = {"cid", NULL}; int64_t cid; + struct channel_id_converter_data cid_data = { + .module = self, + }; if (!PyArg_ParseTupleAndKeywords(args, kwds, "O&:channel_destroy", kwlist, - channel_id_converter, &cid)) { + channel_id_converter, &cid_data)) { return NULL; } + cid = cid_data.cid; - if (_channel_destroy(&_globals.channels, cid) != 0) { + int err = _channel_destroy(&_globals.channels, cid); + if (handle_channel_error(err, self, cid)) { return NULL; } Py_RETURN_NONE; @@ -2316,15 +2654,24 @@ channel_list_all(PyObject *self, PyObject *Py_UNUSED(ignored)) if (ids == NULL) { goto finally; } + module_state *state = get_module_state(self); + if (state == NULL) { + Py_DECREF(ids); + ids = NULL; + goto finally; + } int64_t *cur = cids; for (int64_t i=0; i < count; cur++, i++) { - PyObject *id = (PyObject *)newchannelid(&ChannelIDtype, *cur, 0, - &_globals.channels, 0, 0); - if (id == NULL) { - Py_DECREF(ids); - ids = NULL; + PyObject *id = NULL; + int err = newchannelid(state->ChannelIDType, *cur, 0, + &_globals.channels, 0, 0, + (channelid **)&id); + if (handle_channel_error(err, self, *cur)) { + assert(id == NULL); + Py_SETREF(ids, NULL); break; } + assert(id != NULL); PyList_SET_ITEM(ids, (Py_ssize_t)i, id); } @@ -2343,6 +2690,9 @@ channel_list_interpreters(PyObject *self, PyObject *args, PyObject *kwds) { static char *kwlist[] = {"cid", "send", NULL}; int64_t cid; /* Channel ID */ + struct channel_id_converter_data cid_data = { + .module = self, + }; int send = 0; /* Send or receive end? 
*/ int64_t id; PyObject *ids, *id_obj; @@ -2350,9 +2700,10 @@ channel_list_interpreters(PyObject *self, PyObject *args, PyObject *kwds) if (!PyArg_ParseTupleAndKeywords( args, kwds, "O&$p:channel_list_interpreters", - kwlist, channel_id_converter, &cid, &send)) { + kwlist, channel_id_converter, &cid_data, &send)) { return NULL; } + cid = cid_data.cid; ids = PyList_New(0); if (ids == NULL) { @@ -2365,6 +2716,7 @@ channel_list_interpreters(PyObject *self, PyObject *args, PyObject *kwds) assert(id >= 0); int res = _channel_is_associated(&_globals.channels, cid, id, send); if (res < 0) { + (void)handle_channel_error(res, self, cid); goto except; } if (res) { @@ -2384,8 +2736,7 @@ channel_list_interpreters(PyObject *self, PyObject *args, PyObject *kwds) goto finally; except: - Py_XDECREF(ids); - ids = NULL; + Py_CLEAR(ids); finally: return ids; @@ -2405,13 +2756,18 @@ channel_send(PyObject *self, PyObject *args, PyObject *kwds) { static char *kwlist[] = {"cid", "obj", NULL}; int64_t cid; + struct channel_id_converter_data cid_data = { + .module = self, + }; PyObject *obj; if (!PyArg_ParseTupleAndKeywords(args, kwds, "O&O:channel_send", kwlist, - channel_id_converter, &cid, &obj)) { + channel_id_converter, &cid_data, &obj)) { return NULL; } + cid = cid_data.cid; - if (_channel_send(&_globals.channels, cid, obj) != 0) { + int err = _channel_send(&_globals.channels, cid, obj); + if (handle_channel_error(err, self, cid)) { return NULL; } Py_RETURN_NONE; @@ -2427,26 +2783,32 @@ channel_recv(PyObject *self, PyObject *args, PyObject *kwds) { static char *kwlist[] = {"cid", "default", NULL}; int64_t cid; + struct channel_id_converter_data cid_data = { + .module = self, + }; PyObject *dflt = NULL; if (!PyArg_ParseTupleAndKeywords(args, kwds, "O&|O:channel_recv", kwlist, - channel_id_converter, &cid, &dflt)) { + channel_id_converter, &cid_data, &dflt)) { return NULL; } - Py_XINCREF(dflt); + cid = cid_data.cid; - PyObject *obj = _channel_recv(&_globals.channels, cid); - if (obj != NULL) { - Py_XDECREF(dflt); - return obj; - } else if (PyErr_Occurred()) { - Py_XDECREF(dflt); - return NULL; - } else if (dflt != NULL) { - return dflt; - } else { - PyErr_Format(ChannelEmptyError, "channel %" PRId64 " is empty", cid); + PyObject *obj = NULL; + int err = _channel_recv(&_globals.channels, cid, &obj); + if (handle_channel_error(err, self, cid)) { return NULL; } + Py_XINCREF(dflt); + if (obj == NULL) { + // Use the default. + if (dflt == NULL) { + (void)handle_channel_error(ERR_CHANNEL_EMPTY, self, cid); + return NULL; + } + obj = Py_NewRef(dflt); + } + Py_XDECREF(dflt); + return obj; } PyDoc_STRVAR(channel_recv_doc, @@ -2462,16 +2824,22 @@ channel_close(PyObject *self, PyObject *args, PyObject *kwds) { static char *kwlist[] = {"cid", "send", "recv", "force", NULL}; int64_t cid; + struct channel_id_converter_data cid_data = { + .module = self, + }; int send = 0; int recv = 0; int force = 0; if (!PyArg_ParseTupleAndKeywords(args, kwds, "O&|$ppp:channel_close", kwlist, - channel_id_converter, &cid, &send, &recv, &force)) { + channel_id_converter, &cid_data, + &send, &recv, &force)) { return NULL; } + cid = cid_data.cid; - if (_channel_close(&_globals.channels, cid, send-recv, force) != 0) { + int err = _channel_close(&_globals.channels, cid, send-recv, force); + if (handle_channel_error(err, self, cid)) { return NULL; } Py_RETURN_NONE; @@ -2510,14 +2878,19 @@ channel_release(PyObject *self, PyObject *args, PyObject *kwds) // Note that only the current interpreter is affected. 
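Each channel_* entry point above now routes its cid argument through channel_id_converter together with a struct channel_id_converter_data, so the converter can reach the module (and its per-module exception types) instead of globals. A bare-bones sketch of an "O&" converter that fills a caller-provided struct; id_converter and demo_func are hypothetical names:

    #include <Python.h>

    struct id_converter_data {
        PyObject *module;   /* supplied by the calling function */
        long id;            /* filled in by the converter */
    };

    /* "O&" converter: return 1 on success, 0 (with an exception set) on error. */
    static int
    id_converter(PyObject *arg, void *ptr)
    {
        struct id_converter_data *data = (struct id_converter_data *)ptr;
        long id = PyLong_AsLong(arg);
        if (id == -1 && PyErr_Occurred()) {
            return 0;
        }
        data->id = id;
        return 1;
    }

    static PyObject *
    demo_func(PyObject *module, PyObject *args, PyObject *kwds)
    {
        static char *kwlist[] = {"id", NULL};
        struct id_converter_data data = { .module = module };
        if (!PyArg_ParseTupleAndKeywords(args, kwds, "O&:demo_func", kwlist,
                                         id_converter, &data)) {
            return NULL;
        }
        return PyLong_FromLong(data.id);
    }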
static char *kwlist[] = {"cid", "send", "recv", "force", NULL}; int64_t cid; + struct channel_id_converter_data cid_data = { + .module = self, + }; int send = 0; int recv = 0; int force = 0; if (!PyArg_ParseTupleAndKeywords(args, kwds, "O&|$ppp:channel_release", kwlist, - channel_id_converter, &cid, &send, &recv, &force)) { + channel_id_converter, &cid_data, + &send, &recv, &force)) { return NULL; } + cid = cid_data.cid; if (send == 0 && recv == 0) { send = 1; recv = 1; @@ -2526,7 +2899,8 @@ channel_release(PyObject *self, PyObject *args, PyObject *kwds) // XXX Handle force is True. // XXX Fix implicit release. - if (_channel_drop(&_globals.channels, cid, send, recv) != 0) { + int err = _channel_drop(&_globals.channels, cid, send, recv); + if (handle_channel_error(err, self, cid)) { return NULL; } Py_RETURN_NONE; @@ -2542,7 +2916,18 @@ ends are closed. Closing an already closed end is a noop."); static PyObject * channel__channel_id(PyObject *self, PyObject *args, PyObject *kwds) { - return channelid_new(&ChannelIDtype, args, kwds); + module_state *state = get_module_state(self); + if (state == NULL) { + return NULL; + } + PyTypeObject *cls = state->ChannelIDType; + PyObject *mod = get_module_from_owned_type(cls); + if (mod == NULL) { + return NULL; + } + PyObject *cid = _channelid_new(mod, cls, args, kwds); + Py_DECREF(mod); + return cid; } static PyMethodDef module_functions[] = { @@ -2593,59 +2978,94 @@ PyDoc_STRVAR(module_doc, "This module provides primitive operations to manage Python interpreters.\n\ The 'interpreters' module provides a more convenient interface."); -static struct PyModuleDef interpretersmodule = { - PyModuleDef_HEAD_INIT, - "_xxsubinterpreters", /* m_name */ - module_doc, /* m_doc */ - -1, /* m_size */ - module_functions, /* m_methods */ - NULL, /* m_slots */ - NULL, /* m_traverse */ - NULL, /* m_clear */ - NULL /* m_free */ -}; - - -PyMODINIT_FUNC -PyInit__xxsubinterpreters(void) +static int +module_exec(PyObject *mod) { - if (_init_globals() != 0) { - return NULL; - } - - /* Initialize types */ - if (PyType_Ready(&ChannelIDtype) != 0) { - return NULL; + if (_globals_init() != 0) { + return -1; } - /* Create the module */ - PyObject *module = PyModule_Create(&interpretersmodule); - if (module == NULL) { - return NULL; + module_state *state = get_module_state(mod); + if (state == NULL) { + goto error; } /* Add exception types */ - PyObject *ns = PyModule_GetDict(module); // borrowed - if (interp_exceptions_init(ns) != 0) { - return NULL; + if (interp_exceptions_init(mod) != 0) { + goto error; } - if (channel_exceptions_init(ns) != 0) { - return NULL; + if (channel_exceptions_init(mod) != 0) { + goto error; } /* Add other types */ - Py_INCREF(&ChannelIDtype); - if (PyDict_SetItemString(ns, "ChannelID", (PyObject *)&ChannelIDtype) != 0) { - return NULL; - } - Py_INCREF(&_PyInterpreterID_Type); - if (PyDict_SetItemString(ns, "InterpreterID", (PyObject *)&_PyInterpreterID_Type) != 0) { - return NULL; + + // ChannelID + state->ChannelIDType = add_new_type( + mod, &ChannelIDType_spec, _channelid_shared); + if (state->ChannelIDType == NULL) { + goto error; } - if (_PyCrossInterpreterData_RegisterClass(&ChannelIDtype, _channelid_shared)) { - return NULL; + // PyInterpreterID + if (PyModule_AddType(mod, &_PyInterpreterID_Type) < 0) { + goto error; } - return module; + return 0; + +error: + (void)_PyCrossInterpreterData_UnregisterClass(state->ChannelIDType); + _globals_fini(); + return -1; +} + +static struct PyModuleDef_Slot module_slots[] = { + {Py_mod_exec, module_exec}, + 
{0, NULL}, +}; + +static int +module_traverse(PyObject *mod, visitproc visit, void *arg) +{ + module_state *state = get_module_state(mod); + assert(state != NULL); + traverse_module_state(state, visit, arg); + return 0; +} + +static int +module_clear(PyObject *mod) +{ + module_state *state = get_module_state(mod); + assert(state != NULL); + clear_module_state(state); + return 0; +} + +static void +module_free(void *mod) +{ + module_state *state = get_module_state(mod); + assert(state != NULL); + clear_module_state(state); + _globals_fini(); +} + +static struct PyModuleDef moduledef = { + .m_base = PyModuleDef_HEAD_INIT, + .m_name = MODULE_NAME, + .m_doc = module_doc, + .m_size = sizeof(module_state), + .m_methods = module_functions, + .m_slots = module_slots, + .m_traverse = module_traverse, + .m_clear = module_clear, + .m_free = (freefunc)module_free, +}; + +PyMODINIT_FUNC +PyInit__xxsubinterpreters(void) +{ + return PyModuleDef_Init(&moduledef); } diff --git a/Modules/_xxtestfuzz/fuzzer.c b/Modules/_xxtestfuzz/fuzzer.c index 366e81a54519a7..fb0c191d2c494d 100644 --- a/Modules/_xxtestfuzz/fuzzer.c +++ b/Modules/_xxtestfuzz/fuzzer.c @@ -142,7 +142,7 @@ static int fuzz_struct_unpack(const char* data, size_t size) { } -#define MAX_JSON_TEST_SIZE 0x10000 +#define MAX_JSON_TEST_SIZE 0x100000 PyObject* json_loads_method = NULL; /* Called by LLVMFuzzerTestOneInput for initialization */ @@ -335,7 +335,7 @@ static int fuzz_sre_match(const char* data, size_t size) { return 0; } -#define MAX_CSV_TEST_SIZE 0x10000 +#define MAX_CSV_TEST_SIZE 0x100000 PyObject* csv_module = NULL; PyObject* csv_error = NULL; /* Called by LLVMFuzzerTestOneInput for initialization */ @@ -393,7 +393,7 @@ static int fuzz_csv_reader(const char* data, size_t size) { return 0; } -#define MAX_AST_LITERAL_EVAL_TEST_SIZE 0x10000 +#define MAX_AST_LITERAL_EVAL_TEST_SIZE 0x100000 PyObject* ast_literal_eval_method = NULL; /* Called by LLVMFuzzerTestOneInput for initialization */ static int init_ast_literal_eval(void) { @@ -459,6 +459,9 @@ int LLVMFuzzerInitialize(int *argc, char ***argv) { PyConfig config; PyConfig_InitPythonConfig(&config); config.install_signal_handlers = 0; + /* Raise the limit above the default allows exercising larger things + * now that we fall back to the _pylong module for large values. 
*/ + config.int_max_str_digits = 8086; PyStatus status; status = PyConfig_SetBytesString(&config, &config.program_name, *argv[0]); if (PyStatus_Exception(status)) { diff --git a/Modules/_zoneinfo.c b/Modules/_zoneinfo.c index 36b25bf3c05469..9d38589ea3d1b0 100644 --- a/Modules/_zoneinfo.c +++ b/Modules/_zoneinfo.c @@ -220,20 +220,17 @@ zoneinfo_new_instance(PyTypeObject *type, PyObject *key) } PyObject *rv = PyObject_CallMethod(file_obj, "close", NULL); - Py_DECREF(file_obj); - file_obj = NULL; + Py_SETREF(file_obj, NULL); if (rv == NULL) { goto error; } Py_DECREF(rv); - ((PyZoneInfo_ZoneInfo *)self)->key = key; - Py_INCREF(key); + ((PyZoneInfo_ZoneInfo *)self)->key = Py_NewRef(key); goto cleanup; error: - Py_XDECREF(self); - self = NULL; + Py_CLEAR(self); cleanup: if (file_obj != NULL) { PyObject *exc, *val, *tb; @@ -381,10 +378,9 @@ zoneinfo_ZoneInfo_from_file_impl(PyTypeObject *type, PyObject *file_obj, self->source = SOURCE_FILE; self->file_repr = file_repr; - self->key = key; - Py_INCREF(key); - + self->key = Py_NewRef(key); return obj_self; + error: Py_XDECREF(file_repr); Py_XDECREF(self); @@ -484,8 +480,7 @@ zoneinfo_utcoffset(PyObject *self, PyObject *dt) if (tti == NULL) { return NULL; } - Py_INCREF(tti->utcoff); - return tti->utcoff; + return Py_NewRef(tti->utcoff); } static PyObject * @@ -495,8 +490,7 @@ zoneinfo_dst(PyObject *self, PyObject *dt) if (tti == NULL) { return NULL; } - Py_INCREF(tti->dstoff); - return tti->dstoff; + return Py_NewRef(tti->dstoff); } static PyObject * @@ -506,8 +500,7 @@ zoneinfo_tzname(PyObject *self, PyObject *dt) if (tti == NULL) { return NULL; } - Py_INCREF(tti->tzname); - return tti->tzname; + return Py_NewRef(tti->tzname); } #define GET_DT_TZINFO PyDateTime_DATE_GET_TZINFO @@ -651,8 +644,7 @@ static PyObject * zoneinfo_str(PyZoneInfo_ZoneInfo *self) { if (!(self->key == Py_None)) { - Py_INCREF(self->key); - return self->key; + return Py_NewRef(self->key); } else { return zoneinfo_repr(self); @@ -793,8 +785,7 @@ build_ttinfo(long utcoffset, long dstoffset, PyObject *tzname, _ttinfo *out) return -1; } - out->tzname = tzname; - Py_INCREF(tzname); + out->tzname = Py_NewRef(tzname); return 0; } @@ -1081,9 +1072,7 @@ load_data(PyZoneInfo_ZoneInfo *self, PyObject *file_obj) // that the dstoff is set correctly in that case. 
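The _zoneinfo.c hunks above (and several modules below) fold manual Py_INCREF/Py_DECREF sequences into Py_NewRef(), Py_SETREF() and Py_CLEAR(). A small sketch of the equivalences on a hypothetical demoobject/demo_update pair; behavior is unchanged, only the spelling is shorter:

    #include <Python.h>

    typedef struct {
        PyObject_HEAD
        PyObject *key;      /* never NULL once initialized */
        PyObject *cache;    /* may be NULL */
    } demoobject;

    static void
    demo_update(demoobject *self, PyObject *key)
    {
        /* was: Py_INCREF(key); tmp = key; */
        PyObject *tmp = Py_NewRef(key);

        /* was: old = self->key; self->key = tmp; Py_DECREF(old); */
        Py_SETREF(self->key, tmp);      /* use Py_XSETREF if old value may be NULL */

        /* was: Py_XDECREF(self->cache); self->cache = NULL; */
        Py_CLEAR(self->cache);
    }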
if (PyObject_IsTrue(tti->dstoff)) { _ttinfo *tti_after = &(self->tzrule_after.std); - Py_DECREF(tti_after->dstoff); - tti_after->dstoff = tti->dstoff; - Py_INCREF(tti_after->dstoff); + Py_SETREF(tti_after->dstoff, Py_NewRef(tti->dstoff)); } } @@ -2285,13 +2274,10 @@ strong_cache_node_new(PyObject *key, PyObject *zone) return NULL; } - Py_INCREF(key); - Py_INCREF(zone); - node->next = NULL; node->prev = NULL; - node->key = key; - node->zone = zone; + node->key = Py_NewRef(key); + node->zone = Py_NewRef(zone); return node; } @@ -2443,8 +2429,7 @@ zone_from_strong_cache(const PyTypeObject *const type, PyObject *const key) if (node != NULL) { move_strong_cache_node_to_front(&ZONEINFO_STRONG_CACHE, node); - Py_INCREF(node->zone); - return node->zone; + return Py_NewRef(node->zone); } return NULL; // Cache miss @@ -2619,14 +2604,9 @@ static PyMethodDef module_methods[] = {{NULL, NULL}}; static void module_free(void *m) { - Py_XDECREF(_tzpath_find_tzfile); - _tzpath_find_tzfile = NULL; - - Py_XDECREF(_common_mod); - _common_mod = NULL; - - Py_XDECREF(io_open); - io_open = NULL; + Py_CLEAR(_tzpath_find_tzfile); + Py_CLEAR(_common_mod); + Py_CLEAR(io_open); xdecref_ttinfo(&NO_TTINFO); @@ -2657,8 +2637,9 @@ zoneinfomodule_exec(PyObject *m) goto error; } - Py_INCREF(&PyZoneInfo_ZoneInfoType); - PyModule_AddObject(m, "ZoneInfo", (PyObject *)&PyZoneInfo_ZoneInfoType); + if (PyModule_AddObjectRef(m, "ZoneInfo", (PyObject *)&PyZoneInfo_ZoneInfoType) < 0) { + goto error; + } /* Populate imports */ _tzpath_find_tzfile = @@ -2678,13 +2659,9 @@ zoneinfomodule_exec(PyObject *m) } if (NO_TTINFO.utcoff == NULL) { - NO_TTINFO.utcoff = Py_None; - NO_TTINFO.dstoff = Py_None; - NO_TTINFO.tzname = Py_None; - - for (size_t i = 0; i < 3; ++i) { - Py_INCREF(Py_None); - } + NO_TTINFO.utcoff = Py_NewRef(Py_None); + NO_TTINFO.dstoff = Py_NewRef(Py_None); + NO_TTINFO.tzname = Py_NewRef(Py_None); } if (initialize_caches()) { diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c index d60cf26788f5a6..114c69a033593c 100644 --- a/Modules/arraymodule.c +++ b/Modules/arraymodule.c @@ -58,6 +58,8 @@ typedef struct { PyTypeObject *ArrayType; PyTypeObject *ArrayIterType; + PyObject *array_reconstructor; + PyObject *str_read; PyObject *str_write; PyObject *str___dict__; @@ -707,8 +709,7 @@ array_richcompare(PyObject *v, PyObject *w, int op) res = Py_False; else res = Py_True; - Py_INCREF(res); - return res; + return Py_NewRef(res); } if (va->ob_descr == wa->ob_descr && va->ob_descr->compareitems != NULL) { @@ -731,8 +732,7 @@ array_richcompare(PyObject *v, PyObject *w, int op) default: return NULL; /* cannot happen */ } PyObject *res = cmp ? Py_True : Py_False; - Py_INCREF(res); - return res; + return Py_NewRef(res); } @@ -776,18 +776,15 @@ array_richcompare(PyObject *v, PyObject *w, int op) res = Py_True; else res = Py_False; - Py_INCREF(res); - return res; + return Py_NewRef(res); } /* We have an item that differs. 
First, shortcuts for EQ/NE */ if (op == Py_EQ) { - Py_INCREF(Py_False); - res = Py_False; + res = Py_NewRef(Py_False); } else if (op == Py_NE) { - Py_INCREF(Py_True); - res = Py_True; + res = Py_NewRef(Py_True); } else { /* Compare the final item again using the proper operator */ @@ -1058,8 +1055,7 @@ array_inplace_concat(arrayobject *self, PyObject *bb) } if (array_do_extend(state, self, bb) == -1) return NULL; - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } static PyObject * @@ -1083,8 +1079,7 @@ array_inplace_repeat(arrayobject *self, Py_ssize_t n) _PyBytes_Repeat(self->ob_item, n*size, self->ob_item, size); } - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } @@ -1778,9 +1773,9 @@ static PyObject * array_array___sizeof___impl(arrayobject *self) /*[clinic end generated code: output=d8e1c61ebbe3eaed input=805586565bf2b3c6]*/ { - Py_ssize_t res; - res = _PyObject_SIZE(Py_TYPE(self)) + self->allocated * self->ob_descr->itemsize; - return PyLong_FromSsize_t(res); + size_t res = _PyObject_SIZE(Py_TYPE(self)); + res += (size_t)self->allocated * (size_t)self->ob_descr->itemsize; + return PyLong_FromSize_t(res); } @@ -1945,9 +1940,8 @@ make_array(PyTypeObject *arraytype, char typecode, PyObject *items) Py_DECREF(typecode_obj); return NULL; } - Py_INCREF(items); PyTuple_SET_ITEM(new_args, 0, typecode_obj); - PyTuple_SET_ITEM(new_args, 1, items); + PyTuple_SET_ITEM(new_args, 1, Py_NewRef(items)); array_obj = array_new(arraytype, new_args, NULL); Py_DECREF(new_args); @@ -2191,17 +2185,17 @@ array_array___reduce_ex___impl(arrayobject *self, PyTypeObject *cls, PyObject *array_str; int typecode = self->ob_descr->typecode; int mformat_code; - static PyObject *array_reconstructor = NULL; long protocol; array_state *state = get_array_state_by_class(cls); assert(state != NULL); - if (array_reconstructor == NULL) { - array_reconstructor = _PyImport_GetModuleAttrString( + if (state->array_reconstructor == NULL) { + state->array_reconstructor = _PyImport_GetModuleAttrString( "array", "_array_reconstructor"); - if (array_reconstructor == NULL) + if (state->array_reconstructor == NULL) { return NULL; + } } if (!PyLong_Check(value)) { @@ -2217,8 +2211,7 @@ array_array___reduce_ex___impl(arrayobject *self, PyTypeObject *cls, return NULL; } if (dict == NULL) { - dict = Py_None; - Py_INCREF(dict); + dict = Py_NewRef(Py_None); } mformat_code = typecode_to_mformat_code(typecode); @@ -2252,8 +2245,10 @@ array_array___reduce_ex___impl(arrayobject *self, PyTypeObject *cls, Py_DECREF(dict); return NULL; } + + assert(state->array_reconstructor != NULL); result = Py_BuildValue( - "O(OCiN)O", array_reconstructor, Py_TYPE(self), typecode, + "O(OCiN)O", state->array_reconstructor, Py_TYPE(self), typecode, mformat_code, array_str, dict); Py_DECREF(dict); return result; @@ -2568,8 +2563,7 @@ array_buffer_getbuf(arrayobject *self, Py_buffer *view, int flags) } view->buf = (void *)self->ob_item; - view->obj = (PyObject*)self; - Py_INCREF(self); + view->obj = Py_NewRef(self); if (view->buf == NULL) view->buf = (void *)emptybuf; view->len = Py_SIZE(self) * self->ob_descr->itemsize; @@ -2881,8 +2875,7 @@ array_iter(arrayobject *ao) if (it == NULL) return NULL; - Py_INCREF(ao); - it->ao = ao; + it->ao = (arrayobject*)Py_NewRef(ao); it->index = 0; it->getitem = ao->ob_descr->getitem; PyObject_GC_Track(it); @@ -3013,6 +3006,7 @@ array_traverse(PyObject *module, visitproc visit, void *arg) array_state *state = get_array_state(module); Py_VISIT(state->ArrayType); 
Py_VISIT(state->ArrayIterType); + Py_VISIT(state->array_reconstructor); return 0; } @@ -3022,6 +3016,7 @@ array_clear(PyObject *module) array_state *state = get_array_state(module); Py_CLEAR(state->ArrayType); Py_CLEAR(state->ArrayIterType); + Py_CLEAR(state->array_reconstructor); Py_CLEAR(state->str_read); Py_CLEAR(state->str_write); Py_CLEAR(state->str___dict__); @@ -3066,6 +3061,7 @@ array_modexec(PyObject *m) PyObject *typecodes; const struct arraydescr *descr; + state->array_reconstructor = NULL; /* Add interned strings */ ADD_INTERNED(state, read); ADD_INTERNED(state, write); @@ -3076,8 +3072,8 @@ array_modexec(PyObject *m) CREATE_TYPE(m, state->ArrayIterType, &arrayiter_spec); Py_SET_TYPE(state->ArrayIterType, &PyType_Type); - Py_INCREF((PyObject *)state->ArrayType); - if (PyModule_AddObject(m, "ArrayType", (PyObject *)state->ArrayType) < 0) { + if (PyModule_AddObject(m, "ArrayType", + Py_NewRef((PyObject *)state->ArrayType)) < 0) { Py_DECREF((PyObject *)state->ArrayType); return -1; } diff --git a/Modules/audioop.c b/Modules/audioop.c index 6d484e8bba3ec7..9325f82f9a17e0 100644 --- a/Modules/audioop.c +++ b/Modules/audioop.c @@ -1471,8 +1471,7 @@ audioop_ratecv_impl(PyObject *module, Py_buffer *fragment, int width, len = (Py_ssize_t)(ncp - PyBytes_AsString(str)); rv = PyBytes_FromStringAndSize (PyBytes_AsString(str), len); - Py_DECREF(str); - str = rv; + Py_SETREF(str, rv); if (str == NULL) goto exit; rv = Py_BuildValue("(O(iO))", str, d, samps); diff --git a/Modules/binascii.c b/Modules/binascii.c index ffc2c59413613b..95ddb26988d6c9 100644 --- a/Modules/binascii.c +++ b/Modules/binascii.c @@ -303,14 +303,14 @@ binascii.b2a_uu data: Py_buffer / * - backtick: bool(accept={int}) = False + backtick: bool = False Uuencode line of data. [clinic start generated code]*/ static PyObject * binascii_b2a_uu_impl(PyObject *module, Py_buffer *data, int backtick) -/*[clinic end generated code: output=b1b99de62d9bbeb8 input=b26bc8d32b6ed2f6]*/ +/*[clinic end generated code: output=b1b99de62d9bbeb8 input=beb27822241095cd]*/ { unsigned char *ascii_data; const unsigned char *bin_data; @@ -375,7 +375,7 @@ binascii.a2b_base64 data: ascii_buffer / * - strict_mode: bool(accept={int}) = False + strict_mode: bool = False Decode a line of base64 data. @@ -386,7 +386,7 @@ Decode a line of base64 data. static PyObject * binascii_a2b_base64_impl(PyObject *module, Py_buffer *data, int strict_mode) -/*[clinic end generated code: output=5409557788d4f975 input=3a30c4e3528317c6]*/ +/*[clinic end generated code: output=5409557788d4f975 input=c0c15fd0f8f9a62d]*/ { assert(data->len >= 0); @@ -521,14 +521,14 @@ binascii.b2a_base64 data: Py_buffer / * - newline: bool(accept={int}) = True + newline: bool = True Base64-code line of data. [clinic start generated code]*/ static PyObject * binascii_b2a_base64_impl(PyObject *module, Py_buffer *data, int newline) -/*[clinic end generated code: output=4ad62c8e8485d3b3 input=6083dac5777fa45d]*/ +/*[clinic end generated code: output=4ad62c8e8485d3b3 input=0e20ff59c5f2e3e1]*/ { unsigned char *ascii_data; const unsigned char *bin_data; @@ -952,14 +952,14 @@ binascii_unhexlify_impl(PyObject *module, Py_buffer *hexstr) binascii.a2b_qp data: ascii_buffer - header: bool(accept={int}) = False + header: bool = False Decode a string of qp-encoded data. 
[clinic start generated code]*/ static PyObject * binascii_a2b_qp_impl(PyObject *module, Py_buffer *data, int header) -/*[clinic end generated code: output=e99f7846cfb9bc53 input=bf6766fea76cce8f]*/ +/*[clinic end generated code: output=e99f7846cfb9bc53 input=bdfb31598d4e47b9]*/ { Py_ssize_t in, out; char ch; @@ -1048,9 +1048,9 @@ to_hex (unsigned char ch, unsigned char *s) binascii.b2a_qp data: Py_buffer - quotetabs: bool(accept={int}) = False - istext: bool(accept={int}) = True - header: bool(accept={int}) = False + quotetabs: bool = False + istext: bool = True + header: bool = False Encode a string using quoted-printable encoding. @@ -1062,7 +1062,7 @@ are both encoded. When quotetabs is set, space and tabs are encoded. static PyObject * binascii_b2a_qp_impl(PyObject *module, Py_buffer *data, int quotetabs, int istext, int header) -/*[clinic end generated code: output=e9884472ebb1a94c input=21fb7eea4a184ba6]*/ +/*[clinic end generated code: output=e9884472ebb1a94c input=e9102879afb0defd]*/ { Py_ssize_t in, out; const unsigned char *databuf; diff --git a/Modules/cjkcodecs/clinic/multibytecodec.c.h b/Modules/cjkcodecs/clinic/multibytecodec.c.h index b7e340e68796fa..1b41c231eac5d8 100644 --- a/Modules/cjkcodecs/clinic/multibytecodec.c.h +++ b/Modules/cjkcodecs/clinic/multibytecodec.c.h @@ -246,8 +246,8 @@ _multibytecodec_MultibyteIncrementalEncoder_encode(MultibyteIncrementalEncoderOb if (!noptargs) { goto skip_optional_pos; } - final = _PyLong_AsInt(args[1]); - if (final == -1 && PyErr_Occurred()) { + final = PyObject_IsTrue(args[1]); + if (final < 0) { goto exit; } skip_optional_pos: @@ -381,8 +381,8 @@ _multibytecodec_MultibyteIncrementalDecoder_decode(MultibyteIncrementalDecoderOb if (!noptargs) { goto skip_optional_pos; } - final = _PyLong_AsInt(args[1]); - if (final == -1 && PyErr_Occurred()) { + final = PyObject_IsTrue(args[1]); + if (final < 0) { goto exit; } skip_optional_pos: @@ -690,4 +690,4 @@ PyDoc_STRVAR(_multibytecodec___create_codec__doc__, #define _MULTIBYTECODEC___CREATE_CODEC_METHODDEF \ {"__create_codec", (PyCFunction)_multibytecodec___create_codec, METH_O, _multibytecodec___create_codec__doc__}, -/*[clinic end generated code: output=b034ec7126c11bde input=a9049054013a1b77]*/ +/*[clinic end generated code: output=5f0e8dacddb0ac76 input=a9049054013a1b77]*/ diff --git a/Modules/cjkcodecs/multibytecodec.c b/Modules/cjkcodecs/multibytecodec.c index 4769ab26b1b9e0..8564494f6262fb 100644 --- a/Modules/cjkcodecs/multibytecodec.c +++ b/Modules/cjkcodecs/multibytecodec.c @@ -141,8 +141,7 @@ codecctx_errors_get(MultibyteStatefulCodecContext *self, void *Py_UNUSED(ignored else if (self->errors == ERROR_REPLACE) errors = "replace"; else { - Py_INCREF(self->errors); - return self->errors; + return Py_NewRef(self->errors); } return PyUnicode_FromString(errors); @@ -341,8 +340,7 @@ multibytecodec_encerror(MultibyteCodec *codec, goto errorexit; } else { - Py_INCREF(tobj); - retstr = tobj; + retstr = Py_NewRef(tobj); } assert(PyBytes_Check(retstr)); @@ -786,11 +784,9 @@ encoder_encode_stateful(MultibyteStatefulEncoderContext *ctx, if (ctx->pending) { PyObject *inbuf_tmp; - Py_INCREF(ctx->pending); - origpending = ctx->pending; + origpending = Py_NewRef(ctx->pending); - Py_INCREF(ctx->pending); - inbuf_tmp = ctx->pending; + inbuf_tmp = Py_NewRef(ctx->pending); PyUnicode_Append(&inbuf_tmp, unistr); if (inbuf_tmp == NULL) goto errorexit; @@ -800,8 +796,7 @@ encoder_encode_stateful(MultibyteStatefulEncoderContext *ctx, else { origpending = NULL; - Py_INCREF(unistr); - inbuf = unistr; + 
inbuf = Py_NewRef(unistr); } if (PyUnicode_READY(inbuf) < 0) goto errorexit; @@ -898,14 +893,14 @@ decoder_feed_buffer(MultibyteStatefulDecoderContext *ctx, _multibytecodec.MultibyteIncrementalEncoder.encode input: object - final: bool(accept={int}) = False + final: bool = False [clinic start generated code]*/ static PyObject * _multibytecodec_MultibyteIncrementalEncoder_encode_impl(MultibyteIncrementalEncoderObject *self, PyObject *input, int final) -/*[clinic end generated code: output=123361b6c505e2c1 input=093a1ddbb2fc6721]*/ +/*[clinic end generated code: output=123361b6c505e2c1 input=bd5f7d40d43e99b0]*/ { return encoder_encode_stateful(STATEFUL_ECTX(self), input, final); } @@ -985,8 +980,7 @@ _multibytecodec_MultibyteIncrementalEncoder_setstate_impl(MultibyteIncrementalEn goto errorexit; } - Py_CLEAR(self->pending); - self->pending = pending; + Py_XSETREF(self->pending, pending); memcpy(self->state.c, statebytes+1+statebytes[0], sizeof(self->state.c)); @@ -1120,14 +1114,14 @@ static PyType_Spec encoder_spec = { _multibytecodec.MultibyteIncrementalDecoder.decode input: Py_buffer - final: bool(accept={int}) = False + final: bool = False [clinic start generated code]*/ static PyObject * _multibytecodec_MultibyteIncrementalDecoder_decode_impl(MultibyteIncrementalDecoderObject *self, Py_buffer *input, int final) -/*[clinic end generated code: output=b9b9090e8a9ce2ba input=c9132b24d503eb1d]*/ +/*[clinic end generated code: output=b9b9090e8a9ce2ba input=8795fbb20860027a]*/ { MultibyteDecodeBuffer buf; char *data, *wdata = NULL; @@ -1443,8 +1437,7 @@ mbstreamreader_iread(MultibyteStreamReaderObject *self, memcpy(ctrdata + self->pendingsize, PyBytes_AS_STRING(cres), PyBytes_GET_SIZE(cres)); - Py_DECREF(cres); - cres = ctr; + Py_SETREF(cres, ctr); self->pendingsize = 0; } @@ -1470,8 +1463,7 @@ mbstreamreader_iread(MultibyteStreamReaderObject *self, goto errorexit; } - Py_DECREF(cres); - cres = NULL; + Py_SETREF(cres, NULL); if (sizehint < 0 || buf.writer.pos != 0 || rsize == 0) break; @@ -1645,8 +1637,7 @@ mbstreamreader_new(PyTypeObject *type, PyObject *args, PyObject *kwds) } self->codec = ((MultibyteCodecObject *)codec)->codec; - self->stream = stream; - Py_INCREF(stream); + self->stream = Py_NewRef(stream); self->pendingsize = 0; self->errors = internal_error_callback(errors); if (self->errors == NULL) @@ -1869,8 +1860,7 @@ mbstreamwriter_new(PyTypeObject *type, PyObject *args, PyObject *kwds) } self->codec = ((MultibyteCodecObject *)codec)->codec; - self->stream = stream; - Py_INCREF(stream); + self->stream = Py_NewRef(stream); self->pending = NULL; self->errors = internal_error_callback(errors); if (self->errors == NULL) diff --git a/Modules/clinic/_asynciomodule.c.h b/Modules/clinic/_asynciomodule.c.h index ddec54c8d7c2bc..f2fbb352c2c69b 100644 --- a/Modules/clinic/_asynciomodule.c.h +++ b/Modules/clinic/_asynciomodule.c.h @@ -112,15 +112,19 @@ PyDoc_STRVAR(_asyncio_Future_exception__doc__, "InvalidStateError."); #define _ASYNCIO_FUTURE_EXCEPTION_METHODDEF \ - {"exception", (PyCFunction)_asyncio_Future_exception, METH_NOARGS, _asyncio_Future_exception__doc__}, + {"exception", _PyCFunction_CAST(_asyncio_Future_exception), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _asyncio_Future_exception__doc__}, static PyObject * -_asyncio_Future_exception_impl(FutureObj *self); +_asyncio_Future_exception_impl(FutureObj *self, PyTypeObject *cls); static PyObject * -_asyncio_Future_exception(FutureObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Future_exception(FutureObj *self, PyTypeObject *cls, 
PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { - return _asyncio_Future_exception_impl(self); + if (nargs) { + PyErr_SetString(PyExc_TypeError, "exception() takes no arguments"); + return NULL; + } + return _asyncio_Future_exception_impl(self, cls); } PyDoc_STRVAR(_asyncio_Future_set_result__doc__, @@ -133,7 +137,42 @@ PyDoc_STRVAR(_asyncio_Future_set_result__doc__, "InvalidStateError."); #define _ASYNCIO_FUTURE_SET_RESULT_METHODDEF \ - {"set_result", (PyCFunction)_asyncio_Future_set_result, METH_O, _asyncio_Future_set_result__doc__}, + {"set_result", _PyCFunction_CAST(_asyncio_Future_set_result), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _asyncio_Future_set_result__doc__}, + +static PyObject * +_asyncio_Future_set_result_impl(FutureObj *self, PyTypeObject *cls, + PyObject *result); + +static PyObject * +_asyncio_Future_set_result(FutureObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + # define KWTUPLE (PyObject *)&_Py_SINGLETON(tuple_empty) + #else + # define KWTUPLE NULL + #endif + + static const char * const _keywords[] = {"", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "set_result", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + PyObject *result; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + result = args[0]; + return_value = _asyncio_Future_set_result_impl(self, cls, result); + +exit: + return return_value; +} PyDoc_STRVAR(_asyncio_Future_set_exception__doc__, "set_exception($self, exception, /)\n" @@ -145,7 +184,42 @@ PyDoc_STRVAR(_asyncio_Future_set_exception__doc__, "InvalidStateError."); #define _ASYNCIO_FUTURE_SET_EXCEPTION_METHODDEF \ - {"set_exception", (PyCFunction)_asyncio_Future_set_exception, METH_O, _asyncio_Future_set_exception__doc__}, + {"set_exception", _PyCFunction_CAST(_asyncio_Future_set_exception), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _asyncio_Future_set_exception__doc__}, + +static PyObject * +_asyncio_Future_set_exception_impl(FutureObj *self, PyTypeObject *cls, + PyObject *exception); + +static PyObject * +_asyncio_Future_set_exception(FutureObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + # define KWTUPLE (PyObject *)&_Py_SINGLETON(tuple_empty) + #else + # define KWTUPLE NULL + #endif + + static const char * const _keywords[] = {"", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "set_exception", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + PyObject *exception; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + exception = args[0]; + return_value = _asyncio_Future_set_exception_impl(self, cls, exception); + +exit: + return return_value; +} PyDoc_STRVAR(_asyncio_Future_add_done_callback__doc__, "add_done_callback($self, fn, /, *, context=<unrepresentable>)\n" @@ -158,14 +232,14 @@ PyDoc_STRVAR(_asyncio_Future_add_done_callback__doc__, "scheduled with call_soon."); #define _ASYNCIO_FUTURE_ADD_DONE_CALLBACK_METHODDEF \ - {"add_done_callback", _PyCFunction_CAST(_asyncio_Future_add_done_callback), METH_FASTCALL|METH_KEYWORDS, _asyncio_Future_add_done_callback__doc__}, + {"add_done_callback", 
_PyCFunction_CAST(_asyncio_Future_add_done_callback), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _asyncio_Future_add_done_callback__doc__}, static PyObject * -_asyncio_Future_add_done_callback_impl(FutureObj *self, PyObject *fn, - PyObject *context); +_asyncio_Future_add_done_callback_impl(FutureObj *self, PyTypeObject *cls, + PyObject *fn, PyObject *context); static PyObject * -_asyncio_Future_add_done_callback(FutureObj *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Future_add_done_callback(FutureObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -208,7 +282,7 @@ _asyncio_Future_add_done_callback(FutureObj *self, PyObject *const *args, Py_ssi } context = args[1]; skip_optional_kwonly: - return_value = _asyncio_Future_add_done_callback_impl(self, fn, context); + return_value = _asyncio_Future_add_done_callback_impl(self, cls, fn, context); exit: return return_value; @@ -223,7 +297,42 @@ PyDoc_STRVAR(_asyncio_Future_remove_done_callback__doc__, "Returns the number of callbacks removed."); #define _ASYNCIO_FUTURE_REMOVE_DONE_CALLBACK_METHODDEF \ - {"remove_done_callback", (PyCFunction)_asyncio_Future_remove_done_callback, METH_O, _asyncio_Future_remove_done_callback__doc__}, + {"remove_done_callback", _PyCFunction_CAST(_asyncio_Future_remove_done_callback), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _asyncio_Future_remove_done_callback__doc__}, + +static PyObject * +_asyncio_Future_remove_done_callback_impl(FutureObj *self, PyTypeObject *cls, + PyObject *fn); + +static PyObject * +_asyncio_Future_remove_done_callback(FutureObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + # define KWTUPLE (PyObject *)&_Py_SINGLETON(tuple_empty) + #else + # define KWTUPLE NULL + #endif + + static const char * const _keywords[] = {"", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "remove_done_callback", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + PyObject *fn; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + fn = args[0]; + return_value = _asyncio_Future_remove_done_callback_impl(self, cls, fn); + +exit: + return return_value; +} PyDoc_STRVAR(_asyncio_Future_cancel__doc__, "cancel($self, /, msg=None)\n" @@ -236,13 +345,14 @@ PyDoc_STRVAR(_asyncio_Future_cancel__doc__, "return True."); #define _ASYNCIO_FUTURE_CANCEL_METHODDEF \ - {"cancel", _PyCFunction_CAST(_asyncio_Future_cancel), METH_FASTCALL|METH_KEYWORDS, _asyncio_Future_cancel__doc__}, + {"cancel", _PyCFunction_CAST(_asyncio_Future_cancel), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _asyncio_Future_cancel__doc__}, static PyObject * -_asyncio_Future_cancel_impl(FutureObj *self, PyObject *msg); +_asyncio_Future_cancel_impl(FutureObj *self, PyTypeObject *cls, + PyObject *msg); static PyObject * -_asyncio_Future_cancel(FutureObj *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Future_cancel(FutureObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -283,7 +393,7 @@ _asyncio_Future_cancel(FutureObj *self, PyObject *const *args, Py_ssize_t nargs, } msg = args[0]; 
skip_optional_pos: - return_value = _asyncio_Future_cancel_impl(self, msg); + return_value = _asyncio_Future_cancel_impl(self, cls, msg); exit: return return_value; @@ -335,15 +445,19 @@ PyDoc_STRVAR(_asyncio_Future_get_loop__doc__, "Return the event loop the Future is bound to."); #define _ASYNCIO_FUTURE_GET_LOOP_METHODDEF \ - {"get_loop", (PyCFunction)_asyncio_Future_get_loop, METH_NOARGS, _asyncio_Future_get_loop__doc__}, + {"get_loop", _PyCFunction_CAST(_asyncio_Future_get_loop), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _asyncio_Future_get_loop__doc__}, static PyObject * -_asyncio_Future_get_loop_impl(FutureObj *self); +_asyncio_Future_get_loop_impl(FutureObj *self, PyTypeObject *cls); static PyObject * -_asyncio_Future_get_loop(FutureObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Future_get_loop(FutureObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { - return _asyncio_Future_get_loop_impl(self); + if (nargs) { + PyErr_SetString(PyExc_TypeError, "get_loop() takes no arguments"); + return NULL; + } + return _asyncio_Future_get_loop_impl(self, cls); } PyDoc_STRVAR(_asyncio_Future__make_cancelled_error__doc__, @@ -612,13 +726,14 @@ PyDoc_STRVAR(_asyncio_Task_get_stack__doc__, "returned for a suspended coroutine."); #define _ASYNCIO_TASK_GET_STACK_METHODDEF \ - {"get_stack", _PyCFunction_CAST(_asyncio_Task_get_stack), METH_FASTCALL|METH_KEYWORDS, _asyncio_Task_get_stack__doc__}, + {"get_stack", _PyCFunction_CAST(_asyncio_Task_get_stack), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _asyncio_Task_get_stack__doc__}, static PyObject * -_asyncio_Task_get_stack_impl(TaskObj *self, PyObject *limit); +_asyncio_Task_get_stack_impl(TaskObj *self, PyTypeObject *cls, + PyObject *limit); static PyObject * -_asyncio_Task_get_stack(TaskObj *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Task_get_stack(TaskObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -659,7 +774,7 @@ _asyncio_Task_get_stack(TaskObj *self, PyObject *const *args, Py_ssize_t nargs, } limit = args[0]; skip_optional_kwonly: - return_value = _asyncio_Task_get_stack_impl(self, limit); + return_value = _asyncio_Task_get_stack_impl(self, cls, limit); exit: return return_value; @@ -678,14 +793,14 @@ PyDoc_STRVAR(_asyncio_Task_print_stack__doc__, "to sys.stderr."); #define _ASYNCIO_TASK_PRINT_STACK_METHODDEF \ - {"print_stack", _PyCFunction_CAST(_asyncio_Task_print_stack), METH_FASTCALL|METH_KEYWORDS, _asyncio_Task_print_stack__doc__}, + {"print_stack", _PyCFunction_CAST(_asyncio_Task_print_stack), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _asyncio_Task_print_stack__doc__}, static PyObject * -_asyncio_Task_print_stack_impl(TaskObj *self, PyObject *limit, - PyObject *file); +_asyncio_Task_print_stack_impl(TaskObj *self, PyTypeObject *cls, + PyObject *limit, PyObject *file); static PyObject * -_asyncio_Task_print_stack(TaskObj *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Task_print_stack(TaskObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -733,7 +848,7 @@ _asyncio_Task_print_stack(TaskObj *self, PyObject *const *args, Py_ssize_t nargs } file = args[1]; skip_optional_kwonly: - return_value = _asyncio_Task_print_stack_impl(self, limit, file); + return_value = 
_asyncio_Task_print_stack_impl(self, cls, limit, file); exit: return return_value; @@ -872,68 +987,6 @@ _asyncio_get_event_loop(PyObject *module, PyObject *Py_UNUSED(ignored)) return _asyncio_get_event_loop_impl(module); } -PyDoc_STRVAR(_asyncio__get_event_loop__doc__, -"_get_event_loop($module, /, stacklevel=3)\n" -"--\n" -"\n"); - -#define _ASYNCIO__GET_EVENT_LOOP_METHODDEF \ - {"_get_event_loop", _PyCFunction_CAST(_asyncio__get_event_loop), METH_FASTCALL|METH_KEYWORDS, _asyncio__get_event_loop__doc__}, - -static PyObject * -_asyncio__get_event_loop_impl(PyObject *module, int stacklevel); - -static PyObject * -_asyncio__get_event_loop(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) -{ - PyObject *return_value = NULL; - #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) - - #define NUM_KEYWORDS 1 - static struct { - PyGC_Head _this_is_not_used; - PyObject_VAR_HEAD - PyObject *ob_item[NUM_KEYWORDS]; - } _kwtuple = { - .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) - .ob_item = { &_Py_ID(stacklevel), }, - }; - #undef NUM_KEYWORDS - #define KWTUPLE (&_kwtuple.ob_base.ob_base) - - #else // !Py_BUILD_CORE - # define KWTUPLE NULL - #endif // !Py_BUILD_CORE - - static const char * const _keywords[] = {"stacklevel", NULL}; - static _PyArg_Parser _parser = { - .keywords = _keywords, - .fname = "_get_event_loop", - .kwtuple = KWTUPLE, - }; - #undef KWTUPLE - PyObject *argsbuf[1]; - Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0; - int stacklevel = 3; - - args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf); - if (!args) { - goto exit; - } - if (!noptargs) { - goto skip_optional_pos; - } - stacklevel = _PyLong_AsInt(args[0]); - if (stacklevel == -1 && PyErr_Occurred()) { - goto exit; - } -skip_optional_pos: - return_value = _asyncio__get_event_loop_impl(module, stacklevel); - -exit: - return return_value; -} - PyDoc_STRVAR(_asyncio_get_running_loop__doc__, "get_running_loop($module, /)\n" "--\n" @@ -1189,4 +1242,4 @@ _asyncio__leave_task(PyObject *module, PyObject *const *args, Py_ssize_t nargs, exit: return return_value; } -/*[clinic end generated code: output=f117b2246eaf7a55 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=83580c190031241c input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_codecsmodule.c.h b/Modules/clinic/_codecsmodule.c.h index 25db060cd900fa..f11bcc8815b920 100644 --- a/Modules/clinic/_codecsmodule.c.h +++ b/Modules/clinic/_codecsmodule.c.h @@ -450,8 +450,8 @@ _codecs_utf_7_decode(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 3) { goto skip_optional; } - final = _PyLong_AsInt(args[2]); - if (final == -1 && PyErr_Occurred()) { + final = PyObject_IsTrue(args[2]); + if (final < 0) { goto exit; } skip_optional: @@ -520,8 +520,8 @@ _codecs_utf_8_decode(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 3) { goto skip_optional; } - final = _PyLong_AsInt(args[2]); - if (final == -1 && PyErr_Occurred()) { + final = PyObject_IsTrue(args[2]); + if (final < 0) { goto exit; } skip_optional: @@ -590,8 +590,8 @@ _codecs_utf_16_decode(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 3) { goto skip_optional; } - final = _PyLong_AsInt(args[2]); - if (final == -1 && PyErr_Occurred()) { + final = PyObject_IsTrue(args[2]); + if (final < 0) { goto exit; } skip_optional: @@ -660,8 +660,8 @@ _codecs_utf_16_le_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar if (nargs < 3) { goto 
skip_optional; } - final = _PyLong_AsInt(args[2]); - if (final == -1 && PyErr_Occurred()) { + final = PyObject_IsTrue(args[2]); + if (final < 0) { goto exit; } skip_optional: @@ -730,8 +730,8 @@ _codecs_utf_16_be_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar if (nargs < 3) { goto skip_optional; } - final = _PyLong_AsInt(args[2]); - if (final == -1 && PyErr_Occurred()) { + final = PyObject_IsTrue(args[2]); + if (final < 0) { goto exit; } skip_optional: @@ -809,8 +809,8 @@ _codecs_utf_16_ex_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar if (nargs < 4) { goto skip_optional; } - final = _PyLong_AsInt(args[3]); - if (final == -1 && PyErr_Occurred()) { + final = PyObject_IsTrue(args[3]); + if (final < 0) { goto exit; } skip_optional: @@ -879,8 +879,8 @@ _codecs_utf_32_decode(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 3) { goto skip_optional; } - final = _PyLong_AsInt(args[2]); - if (final == -1 && PyErr_Occurred()) { + final = PyObject_IsTrue(args[2]); + if (final < 0) { goto exit; } skip_optional: @@ -949,8 +949,8 @@ _codecs_utf_32_le_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar if (nargs < 3) { goto skip_optional; } - final = _PyLong_AsInt(args[2]); - if (final == -1 && PyErr_Occurred()) { + final = PyObject_IsTrue(args[2]); + if (final < 0) { goto exit; } skip_optional: @@ -1019,8 +1019,8 @@ _codecs_utf_32_be_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar if (nargs < 3) { goto skip_optional; } - final = _PyLong_AsInt(args[2]); - if (final == -1 && PyErr_Occurred()) { + final = PyObject_IsTrue(args[2]); + if (final < 0) { goto exit; } skip_optional: @@ -1098,8 +1098,8 @@ _codecs_utf_32_ex_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar if (nargs < 4) { goto skip_optional; } - final = _PyLong_AsInt(args[3]); - if (final == -1 && PyErr_Occurred()) { + final = PyObject_IsTrue(args[3]); + if (final < 0) { goto exit; } skip_optional: @@ -1178,8 +1178,8 @@ _codecs_unicode_escape_decode(PyObject *module, PyObject *const *args, Py_ssize_ if (nargs < 3) { goto skip_optional; } - final = _PyLong_AsInt(args[2]); - if (final == -1 && PyErr_Occurred()) { + final = PyObject_IsTrue(args[2]); + if (final < 0) { goto exit; } skip_optional: @@ -1258,8 +1258,8 @@ _codecs_raw_unicode_escape_decode(PyObject *module, PyObject *const *args, Py_ss if (nargs < 3) { goto skip_optional; } - final = _PyLong_AsInt(args[2]); - if (final == -1 && PyErr_Occurred()) { + final = PyObject_IsTrue(args[2]); + if (final < 0) { goto exit; } skip_optional: @@ -1521,8 +1521,8 @@ _codecs_mbcs_decode(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 3) { goto skip_optional; } - final = _PyLong_AsInt(args[2]); - if (final == -1 && PyErr_Occurred()) { + final = PyObject_IsTrue(args[2]); + if (final < 0) { goto exit; } skip_optional: @@ -1595,8 +1595,8 @@ _codecs_oem_decode(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 3) { goto skip_optional; } - final = _PyLong_AsInt(args[2]); - if (final == -1 && PyErr_Occurred()) { + final = PyObject_IsTrue(args[2]); + if (final < 0) { goto exit; } skip_optional: @@ -1674,8 +1674,8 @@ _codecs_code_page_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar if (nargs < 4) { goto skip_optional; } - final = _PyLong_AsInt(args[3]); - if (final == -1 && PyErr_Occurred()) { + final = PyObject_IsTrue(args[3]); + if (final < 0) { goto exit; } skip_optional: @@ -2869,4 +2869,4 @@ _codecs_lookup_error(PyObject *module, PyObject *arg) #ifndef 
_CODECS_CODE_PAGE_ENCODE_METHODDEF #define _CODECS_CODE_PAGE_ENCODE_METHODDEF #endif /* !defined(_CODECS_CODE_PAGE_ENCODE_METHODDEF) */ -/*[clinic end generated code: output=e885abad241bc54d input=a9049054013a1b77]*/ +/*[clinic end generated code: output=603da07cf8dfeb4b input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_cursesmodule.c.h b/Modules/clinic/_cursesmodule.c.h index 67fadace863970..9d99d41af5d2d9 100644 --- a/Modules/clinic/_cursesmodule.c.h +++ b/Modules/clinic/_cursesmodule.c.h @@ -1748,7 +1748,7 @@ _curses_window_touchline(PyCursesWindowObject *self, PyObject *args) } break; case 3: - if (!PyArg_ParseTuple(args, "iii:touchline", &start, &count, &changed)) { + if (!PyArg_ParseTuple(args, "iip:touchline", &start, &count, &changed)) { goto exit; } group_right_1 = 1; @@ -1941,8 +1941,8 @@ _curses_cbreak(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - flag = _PyLong_AsInt(args[0]); - if (flag == -1 && PyErr_Occurred()) { + flag = PyObject_IsTrue(args[0]); + if (flag < 0) { goto exit; } skip_optional: @@ -2177,8 +2177,8 @@ _curses_echo(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - flag = _PyLong_AsInt(args[0]); - if (flag == -1 && PyErr_Occurred()) { + flag = PyObject_IsTrue(args[0]); + if (flag < 0) { goto exit; } skip_optional: @@ -2900,8 +2900,8 @@ _curses_intrflush(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int flag; - flag = _PyLong_AsInt(arg); - if (flag == -1 && PyErr_Occurred()) { + flag = PyObject_IsTrue(arg); + if (flag < 0) { goto exit; } return_value = _curses_intrflush_impl(module, flag); @@ -3064,8 +3064,8 @@ _curses_meta(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int yes; - yes = _PyLong_AsInt(arg); - if (yes == -1 && PyErr_Occurred()) { + yes = PyObject_IsTrue(arg); + if (yes < 0) { goto exit; } return_value = _curses_meta_impl(module, yes); @@ -3308,8 +3308,8 @@ _curses_nl(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - flag = _PyLong_AsInt(args[0]); - if (flag == -1 && PyErr_Occurred()) { + flag = PyObject_IsTrue(args[0]); + if (flag < 0) { goto exit; } skip_optional: @@ -3540,8 +3540,8 @@ _curses_qiflush(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - flag = _PyLong_AsInt(args[0]); - if (flag == -1 && PyErr_Occurred()) { + flag = PyObject_IsTrue(args[0]); + if (flag < 0) { goto exit; } skip_optional: @@ -3603,8 +3603,8 @@ _curses_raw(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - flag = _PyLong_AsInt(args[0]); - if (flag == -1 && PyErr_Occurred()) { + flag = PyObject_IsTrue(args[0]); + if (flag < 0) { goto exit; } skip_optional: @@ -4164,8 +4164,8 @@ _curses_use_env(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int flag; - flag = _PyLong_AsInt(arg); - if (flag == -1 && PyErr_Occurred()) { + flag = PyObject_IsTrue(arg); + if (flag < 0) { goto exit; } return_value = _curses_use_env_impl(module, flag); @@ -4313,4 +4313,4 @@ _curses_has_extended_color_support(PyObject *module, PyObject *Py_UNUSED(ignored #ifndef _CURSES_USE_DEFAULT_COLORS_METHODDEF #define _CURSES_USE_DEFAULT_COLORS_METHODDEF #endif /* !defined(_CURSES_USE_DEFAULT_COLORS_METHODDEF) */ -/*[clinic end generated code: output=b2e71e2012f16197 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=27a2364193b503c1 input=a9049054013a1b77]*/ diff --git 
a/Modules/clinic/_elementtree.c.h b/Modules/clinic/_elementtree.c.h index 0a2a74e220c2d1..33ccaf7e7c7eaf 100644 --- a/Modules/clinic/_elementtree.c.h +++ b/Modules/clinic/_elementtree.c.h @@ -106,20 +106,20 @@ PyDoc_STRVAR(_elementtree_Element___sizeof____doc__, #define _ELEMENTTREE_ELEMENT___SIZEOF___METHODDEF \ {"__sizeof__", (PyCFunction)_elementtree_Element___sizeof__, METH_NOARGS, _elementtree_Element___sizeof____doc__}, -static Py_ssize_t +static size_t _elementtree_Element___sizeof___impl(ElementObject *self); static PyObject * _elementtree_Element___sizeof__(ElementObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; - Py_ssize_t _return_value; + size_t _return_value; _return_value = _elementtree_Element___sizeof___impl(self); - if ((_return_value == -1) && PyErr_Occurred()) { + if ((_return_value == (size_t)-1) && PyErr_Occurred()) { goto exit; } - return_value = PyLong_FromSsize_t(_return_value); + return_value = PyLong_FromSize_t(_return_value); exit: return return_value; @@ -1105,4 +1105,4 @@ _elementtree_XMLParser__setevents(XMLParserObject *self, PyObject *const *args, exit: return return_value; } -/*[clinic end generated code: output=67a80531eaf43815 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=4ad006cadce01571 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_pickle.c.h b/Modules/clinic/_pickle.c.h index e9ff2604719abd..adb3abc5eb2372 100644 --- a/Modules/clinic/_pickle.c.h +++ b/Modules/clinic/_pickle.c.h @@ -49,20 +49,20 @@ PyDoc_STRVAR(_pickle_Pickler___sizeof____doc__, #define _PICKLE_PICKLER___SIZEOF___METHODDEF \ {"__sizeof__", (PyCFunction)_pickle_Pickler___sizeof__, METH_NOARGS, _pickle_Pickler___sizeof____doc__}, -static Py_ssize_t +static size_t _pickle_Pickler___sizeof___impl(PicklerObject *self); static PyObject * _pickle_Pickler___sizeof__(PicklerObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; - Py_ssize_t _return_value; + size_t _return_value; _return_value = _pickle_Pickler___sizeof___impl(self); - if ((_return_value == -1) && PyErr_Occurred()) { + if ((_return_value == (size_t)-1) && PyErr_Occurred()) { goto exit; } - return_value = PyLong_FromSsize_t(_return_value); + return_value = PyLong_FromSize_t(_return_value); exit: return return_value; @@ -301,20 +301,20 @@ PyDoc_STRVAR(_pickle_Unpickler___sizeof____doc__, #define _PICKLE_UNPICKLER___SIZEOF___METHODDEF \ {"__sizeof__", (PyCFunction)_pickle_Unpickler___sizeof__, METH_NOARGS, _pickle_Unpickler___sizeof____doc__}, -static Py_ssize_t +static size_t _pickle_Unpickler___sizeof___impl(UnpicklerObject *self); static PyObject * _pickle_Unpickler___sizeof__(UnpicklerObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; - Py_ssize_t _return_value; + size_t _return_value; _return_value = _pickle_Unpickler___sizeof___impl(self); - if ((_return_value == -1) && PyErr_Occurred()) { + if ((_return_value == (size_t)-1) && PyErr_Occurred()) { goto exit; } - return_value = PyLong_FromSsize_t(_return_value); + return_value = PyLong_FromSize_t(_return_value); exit: return return_value; @@ -980,4 +980,4 @@ _pickle_loads(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObjec exit: return return_value; } -/*[clinic end generated code: output=3321309c2157ee74 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=730dc26938561313 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_ssl.c.h b/Modules/clinic/_ssl.c.h index 622e321fa1d8b3..2d7c98c4f014a3 100644 --- a/Modules/clinic/_ssl.c.h +++ 
b/Modules/clinic/_ssl.c.h @@ -757,8 +757,8 @@ _ssl__SSLContext__wrap_socket(PySSLContext *self, PyObject *const *args, Py_ssiz goto exit; } sock = args[0]; - server_side = _PyLong_AsInt(args[1]); - if (server_side == -1 && PyErr_Occurred()) { + server_side = PyObject_IsTrue(args[1]); + if (server_side < 0) { goto exit; } if (!noptargs) { @@ -855,8 +855,8 @@ _ssl__SSLContext__wrap_bio(PySSLContext *self, PyObject *const *args, Py_ssize_t goto exit; } outgoing = (PySSLMemoryBIO *)args[1]; - server_side = _PyLong_AsInt(args[2]); - if (server_side == -1 && PyErr_Occurred()) { + server_side = PyObject_IsTrue(args[2]); + if (server_side < 0) { goto exit; } if (!noptargs) { @@ -1543,4 +1543,4 @@ _ssl_enum_crls(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje #ifndef _SSL_ENUM_CRLS_METHODDEF #define _SSL_ENUM_CRLS_METHODDEF #endif /* !defined(_SSL_ENUM_CRLS_METHODDEF) */ -/*[clinic end generated code: output=9f477b0c709acb28 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=a3d97a19163bb044 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_testclinic.c.h b/Modules/clinic/_testclinic.c.h new file mode 100644 index 00000000000000..21bde529470294 --- /dev/null +++ b/Modules/clinic/_testclinic.c.h @@ -0,0 +1,2814 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) +# include "pycore_gc.h" // PyGC_Head +# include "pycore_runtime.h" // _Py_ID() +#endif + + +PyDoc_STRVAR(test_empty_function__doc__, +"test_empty_function($module, /)\n" +"--\n" +"\n"); + +#define TEST_EMPTY_FUNCTION_METHODDEF \ + {"test_empty_function", (PyCFunction)test_empty_function, METH_NOARGS, test_empty_function__doc__}, + +static PyObject * +test_empty_function_impl(PyObject *module); + +static PyObject * +test_empty_function(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + return test_empty_function_impl(module); +} + +PyDoc_STRVAR(objects_converter__doc__, +"objects_converter($module, a, b=<unrepresentable>, /)\n" +"--\n" +"\n"); + +#define OBJECTS_CONVERTER_METHODDEF \ + {"objects_converter", _PyCFunction_CAST(objects_converter), METH_FASTCALL, objects_converter__doc__}, + +static PyObject * +objects_converter_impl(PyObject *module, PyObject *a, PyObject *b); + +static PyObject * +objects_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *a; + PyObject *b = NULL; + + if (!_PyArg_CheckPositional("objects_converter", nargs, 1, 2)) { + goto exit; + } + a = args[0]; + if (nargs < 2) { + goto skip_optional; + } + b = args[1]; +skip_optional: + return_value = objects_converter_impl(module, a, b); + +exit: + return return_value; +} + +PyDoc_STRVAR(bytes_object_converter__doc__, +"bytes_object_converter($module, a, /)\n" +"--\n" +"\n"); + +#define BYTES_OBJECT_CONVERTER_METHODDEF \ + {"bytes_object_converter", (PyCFunction)bytes_object_converter, METH_O, bytes_object_converter__doc__}, + +static PyObject * +bytes_object_converter_impl(PyObject *module, PyBytesObject *a); + +static PyObject * +bytes_object_converter(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + PyBytesObject *a; + + if (!PyBytes_Check(arg)) { + _PyArg_BadArgument("bytes_object_converter", "argument", "bytes", arg); + goto exit; + } + a = (PyBytesObject *)arg; + return_value = bytes_object_converter_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(byte_array_object_converter__doc__, +"byte_array_object_converter($module, a, /)\n" +"--\n" +"\n"); + 
+#define BYTE_ARRAY_OBJECT_CONVERTER_METHODDEF \ + {"byte_array_object_converter", (PyCFunction)byte_array_object_converter, METH_O, byte_array_object_converter__doc__}, + +static PyObject * +byte_array_object_converter_impl(PyObject *module, PyByteArrayObject *a); + +static PyObject * +byte_array_object_converter(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + PyByteArrayObject *a; + + if (!PyByteArray_Check(arg)) { + _PyArg_BadArgument("byte_array_object_converter", "argument", "bytearray", arg); + goto exit; + } + a = (PyByteArrayObject *)arg; + return_value = byte_array_object_converter_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(unicode_converter__doc__, +"unicode_converter($module, a, /)\n" +"--\n" +"\n"); + +#define UNICODE_CONVERTER_METHODDEF \ + {"unicode_converter", (PyCFunction)unicode_converter, METH_O, unicode_converter__doc__}, + +static PyObject * +unicode_converter_impl(PyObject *module, PyObject *a); + +static PyObject * +unicode_converter(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + PyObject *a; + + if (!PyUnicode_Check(arg)) { + _PyArg_BadArgument("unicode_converter", "argument", "str", arg); + goto exit; + } + if (PyUnicode_READY(arg) == -1) { + goto exit; + } + a = arg; + return_value = unicode_converter_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(bool_converter__doc__, +"bool_converter($module, a=True, b=True, c=True, /)\n" +"--\n" +"\n"); + +#define BOOL_CONVERTER_METHODDEF \ + {"bool_converter", _PyCFunction_CAST(bool_converter), METH_FASTCALL, bool_converter__doc__}, + +static PyObject * +bool_converter_impl(PyObject *module, int a, int b, int c); + +static PyObject * +bool_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + int a = 1; + int b = 1; + int c = 1; + + if (!_PyArg_CheckPositional("bool_converter", nargs, 0, 3)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + a = PyObject_IsTrue(args[0]); + if (a < 0) { + goto exit; + } + if (nargs < 2) { + goto skip_optional; + } + b = PyObject_IsTrue(args[1]); + if (b < 0) { + goto exit; + } + if (nargs < 3) { + goto skip_optional; + } + c = _PyLong_AsInt(args[2]); + if (c == -1 && PyErr_Occurred()) { + goto exit; + } +skip_optional: + return_value = bool_converter_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(char_converter__doc__, +"char_converter($module, a=b\'A\', b=b\'\\x07\', c=b\'\\x08\', d=b\'\\t\', e=b\'\\n\',\n" +" f=b\'\\x0b\', g=b\'\\x0c\', h=b\'\\r\', i=b\'\"\', j=b\"\'\", k=b\'?\',\n" +" l=b\'\\\\\', m=b\'\\x00\', n=b\'\\xff\', /)\n" +"--\n" +"\n"); + +#define CHAR_CONVERTER_METHODDEF \ + {"char_converter", _PyCFunction_CAST(char_converter), METH_FASTCALL, char_converter__doc__}, + +static PyObject * +char_converter_impl(PyObject *module, char a, char b, char c, char d, char e, + char f, char g, char h, char i, char j, char k, char l, + char m, char n); + +static PyObject * +char_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + char a = 'A'; + char b = '\x07'; + char c = '\x08'; + char d = '\t'; + char e = '\n'; + char f = '\x0b'; + char g = '\x0c'; + char h = '\r'; + char i = '"'; + char j = '\''; + char k = '?'; + char l = '\\'; + char m = '\x00'; + char n = '\xff'; + + if (!_PyArg_CheckPositional("char_converter", nargs, 0, 14)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + if (PyBytes_Check(args[0]) && PyBytes_GET_SIZE(args[0]) 
== 1) { + a = PyBytes_AS_STRING(args[0])[0]; + } + else if (PyByteArray_Check(args[0]) && PyByteArray_GET_SIZE(args[0]) == 1) { + a = PyByteArray_AS_STRING(args[0])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 1", "a byte string of length 1", args[0]); + goto exit; + } + if (nargs < 2) { + goto skip_optional; + } + if (PyBytes_Check(args[1]) && PyBytes_GET_SIZE(args[1]) == 1) { + b = PyBytes_AS_STRING(args[1])[0]; + } + else if (PyByteArray_Check(args[1]) && PyByteArray_GET_SIZE(args[1]) == 1) { + b = PyByteArray_AS_STRING(args[1])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 2", "a byte string of length 1", args[1]); + goto exit; + } + if (nargs < 3) { + goto skip_optional; + } + if (PyBytes_Check(args[2]) && PyBytes_GET_SIZE(args[2]) == 1) { + c = PyBytes_AS_STRING(args[2])[0]; + } + else if (PyByteArray_Check(args[2]) && PyByteArray_GET_SIZE(args[2]) == 1) { + c = PyByteArray_AS_STRING(args[2])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 3", "a byte string of length 1", args[2]); + goto exit; + } + if (nargs < 4) { + goto skip_optional; + } + if (PyBytes_Check(args[3]) && PyBytes_GET_SIZE(args[3]) == 1) { + d = PyBytes_AS_STRING(args[3])[0]; + } + else if (PyByteArray_Check(args[3]) && PyByteArray_GET_SIZE(args[3]) == 1) { + d = PyByteArray_AS_STRING(args[3])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 4", "a byte string of length 1", args[3]); + goto exit; + } + if (nargs < 5) { + goto skip_optional; + } + if (PyBytes_Check(args[4]) && PyBytes_GET_SIZE(args[4]) == 1) { + e = PyBytes_AS_STRING(args[4])[0]; + } + else if (PyByteArray_Check(args[4]) && PyByteArray_GET_SIZE(args[4]) == 1) { + e = PyByteArray_AS_STRING(args[4])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 5", "a byte string of length 1", args[4]); + goto exit; + } + if (nargs < 6) { + goto skip_optional; + } + if (PyBytes_Check(args[5]) && PyBytes_GET_SIZE(args[5]) == 1) { + f = PyBytes_AS_STRING(args[5])[0]; + } + else if (PyByteArray_Check(args[5]) && PyByteArray_GET_SIZE(args[5]) == 1) { + f = PyByteArray_AS_STRING(args[5])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 6", "a byte string of length 1", args[5]); + goto exit; + } + if (nargs < 7) { + goto skip_optional; + } + if (PyBytes_Check(args[6]) && PyBytes_GET_SIZE(args[6]) == 1) { + g = PyBytes_AS_STRING(args[6])[0]; + } + else if (PyByteArray_Check(args[6]) && PyByteArray_GET_SIZE(args[6]) == 1) { + g = PyByteArray_AS_STRING(args[6])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 7", "a byte string of length 1", args[6]); + goto exit; + } + if (nargs < 8) { + goto skip_optional; + } + if (PyBytes_Check(args[7]) && PyBytes_GET_SIZE(args[7]) == 1) { + h = PyBytes_AS_STRING(args[7])[0]; + } + else if (PyByteArray_Check(args[7]) && PyByteArray_GET_SIZE(args[7]) == 1) { + h = PyByteArray_AS_STRING(args[7])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 8", "a byte string of length 1", args[7]); + goto exit; + } + if (nargs < 9) { + goto skip_optional; + } + if (PyBytes_Check(args[8]) && PyBytes_GET_SIZE(args[8]) == 1) { + i = PyBytes_AS_STRING(args[8])[0]; + } + else if (PyByteArray_Check(args[8]) && PyByteArray_GET_SIZE(args[8]) == 1) { + i = PyByteArray_AS_STRING(args[8])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 9", "a byte string of length 1", args[8]); + goto exit; + } + if (nargs < 10) { + goto skip_optional; + } + if (PyBytes_Check(args[9]) && PyBytes_GET_SIZE(args[9]) == 1) { 
+ j = PyBytes_AS_STRING(args[9])[0]; + } + else if (PyByteArray_Check(args[9]) && PyByteArray_GET_SIZE(args[9]) == 1) { + j = PyByteArray_AS_STRING(args[9])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 10", "a byte string of length 1", args[9]); + goto exit; + } + if (nargs < 11) { + goto skip_optional; + } + if (PyBytes_Check(args[10]) && PyBytes_GET_SIZE(args[10]) == 1) { + k = PyBytes_AS_STRING(args[10])[0]; + } + else if (PyByteArray_Check(args[10]) && PyByteArray_GET_SIZE(args[10]) == 1) { + k = PyByteArray_AS_STRING(args[10])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 11", "a byte string of length 1", args[10]); + goto exit; + } + if (nargs < 12) { + goto skip_optional; + } + if (PyBytes_Check(args[11]) && PyBytes_GET_SIZE(args[11]) == 1) { + l = PyBytes_AS_STRING(args[11])[0]; + } + else if (PyByteArray_Check(args[11]) && PyByteArray_GET_SIZE(args[11]) == 1) { + l = PyByteArray_AS_STRING(args[11])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 12", "a byte string of length 1", args[11]); + goto exit; + } + if (nargs < 13) { + goto skip_optional; + } + if (PyBytes_Check(args[12]) && PyBytes_GET_SIZE(args[12]) == 1) { + m = PyBytes_AS_STRING(args[12])[0]; + } + else if (PyByteArray_Check(args[12]) && PyByteArray_GET_SIZE(args[12]) == 1) { + m = PyByteArray_AS_STRING(args[12])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 13", "a byte string of length 1", args[12]); + goto exit; + } + if (nargs < 14) { + goto skip_optional; + } + if (PyBytes_Check(args[13]) && PyBytes_GET_SIZE(args[13]) == 1) { + n = PyBytes_AS_STRING(args[13])[0]; + } + else if (PyByteArray_Check(args[13]) && PyByteArray_GET_SIZE(args[13]) == 1) { + n = PyByteArray_AS_STRING(args[13])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 14", "a byte string of length 1", args[13]); + goto exit; + } +skip_optional: + return_value = char_converter_impl(module, a, b, c, d, e, f, g, h, i, j, k, l, m, n); + +exit: + return return_value; +} + +PyDoc_STRVAR(unsigned_char_converter__doc__, +"unsigned_char_converter($module, a=12, b=34, c=56, /)\n" +"--\n" +"\n"); + +#define UNSIGNED_CHAR_CONVERTER_METHODDEF \ + {"unsigned_char_converter", _PyCFunction_CAST(unsigned_char_converter), METH_FASTCALL, unsigned_char_converter__doc__}, + +static PyObject * +unsigned_char_converter_impl(PyObject *module, unsigned char a, + unsigned char b, unsigned char c); + +static PyObject * +unsigned_char_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + unsigned char a = 12; + unsigned char b = 34; + unsigned char c = 56; + + if (!_PyArg_CheckPositional("unsigned_char_converter", nargs, 0, 3)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + { + long ival = PyLong_AsLong(args[0]); + if (ival == -1 && PyErr_Occurred()) { + goto exit; + } + else if (ival < 0) { + PyErr_SetString(PyExc_OverflowError, + "unsigned byte integer is less than minimum"); + goto exit; + } + else if (ival > UCHAR_MAX) { + PyErr_SetString(PyExc_OverflowError, + "unsigned byte integer is greater than maximum"); + goto exit; + } + else { + a = (unsigned char) ival; + } + } + if (nargs < 2) { + goto skip_optional; + } + { + long ival = PyLong_AsLong(args[1]); + if (ival == -1 && PyErr_Occurred()) { + goto exit; + } + else if (ival < 0) { + PyErr_SetString(PyExc_OverflowError, + "unsigned byte integer is less than minimum"); + goto exit; + } + else if (ival > UCHAR_MAX) { + PyErr_SetString(PyExc_OverflowError, + 
"unsigned byte integer is greater than maximum"); + goto exit; + } + else { + b = (unsigned char) ival; + } + } + if (nargs < 3) { + goto skip_optional; + } + { + unsigned long ival = PyLong_AsUnsignedLongMask(args[2]); + if (ival == (unsigned long)-1 && PyErr_Occurred()) { + goto exit; + } + else { + c = (unsigned char) ival; + } + } +skip_optional: + return_value = unsigned_char_converter_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(short_converter__doc__, +"short_converter($module, a=12, /)\n" +"--\n" +"\n"); + +#define SHORT_CONVERTER_METHODDEF \ + {"short_converter", _PyCFunction_CAST(short_converter), METH_FASTCALL, short_converter__doc__}, + +static PyObject * +short_converter_impl(PyObject *module, short a); + +static PyObject * +short_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + short a = 12; + + if (!_PyArg_CheckPositional("short_converter", nargs, 0, 1)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + { + long ival = PyLong_AsLong(args[0]); + if (ival == -1 && PyErr_Occurred()) { + goto exit; + } + else if (ival < SHRT_MIN) { + PyErr_SetString(PyExc_OverflowError, + "signed short integer is less than minimum"); + goto exit; + } + else if (ival > SHRT_MAX) { + PyErr_SetString(PyExc_OverflowError, + "signed short integer is greater than maximum"); + goto exit; + } + else { + a = (short) ival; + } + } +skip_optional: + return_value = short_converter_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(unsigned_short_converter__doc__, +"unsigned_short_converter($module, a=12, b=34, c=56, /)\n" +"--\n" +"\n"); + +#define UNSIGNED_SHORT_CONVERTER_METHODDEF \ + {"unsigned_short_converter", _PyCFunction_CAST(unsigned_short_converter), METH_FASTCALL, unsigned_short_converter__doc__}, + +static PyObject * +unsigned_short_converter_impl(PyObject *module, unsigned short a, + unsigned short b, unsigned short c); + +static PyObject * +unsigned_short_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + unsigned short a = 12; + unsigned short b = 34; + unsigned short c = 56; + + if (!_PyArg_CheckPositional("unsigned_short_converter", nargs, 0, 3)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + if (!_PyLong_UnsignedShort_Converter(args[0], &a)) { + goto exit; + } + if (nargs < 2) { + goto skip_optional; + } + if (!_PyLong_UnsignedShort_Converter(args[1], &b)) { + goto exit; + } + if (nargs < 3) { + goto skip_optional; + } + c = (unsigned short)PyLong_AsUnsignedLongMask(args[2]); + if (c == (unsigned short)-1 && PyErr_Occurred()) { + goto exit; + } +skip_optional: + return_value = unsigned_short_converter_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(int_converter__doc__, +"int_converter($module, a=12, b=34, c=45, /)\n" +"--\n" +"\n"); + +#define INT_CONVERTER_METHODDEF \ + {"int_converter", _PyCFunction_CAST(int_converter), METH_FASTCALL, int_converter__doc__}, + +static PyObject * +int_converter_impl(PyObject *module, int a, int b, int c); + +static PyObject * +int_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + int a = 12; + int b = 34; + int c = 45; + + if (!_PyArg_CheckPositional("int_converter", nargs, 0, 3)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + a = _PyLong_AsInt(args[0]); + if (a == -1 && PyErr_Occurred()) { + goto exit; + } + if (nargs < 2) { + goto skip_optional; + } + b = 
_PyLong_AsInt(args[1]); + if (b == -1 && PyErr_Occurred()) { + goto exit; + } + if (nargs < 3) { + goto skip_optional; + } + if (!PyUnicode_Check(args[2])) { + _PyArg_BadArgument("int_converter", "argument 3", "a unicode character", args[2]); + goto exit; + } + if (PyUnicode_READY(args[2])) { + goto exit; + } + if (PyUnicode_GET_LENGTH(args[2]) != 1) { + _PyArg_BadArgument("int_converter", "argument 3", "a unicode character", args[2]); + goto exit; + } + c = PyUnicode_READ_CHAR(args[2], 0); +skip_optional: + return_value = int_converter_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(unsigned_int_converter__doc__, +"unsigned_int_converter($module, a=12, b=34, c=56, /)\n" +"--\n" +"\n"); + +#define UNSIGNED_INT_CONVERTER_METHODDEF \ + {"unsigned_int_converter", _PyCFunction_CAST(unsigned_int_converter), METH_FASTCALL, unsigned_int_converter__doc__}, + +static PyObject * +unsigned_int_converter_impl(PyObject *module, unsigned int a, unsigned int b, + unsigned int c); + +static PyObject * +unsigned_int_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + unsigned int a = 12; + unsigned int b = 34; + unsigned int c = 56; + + if (!_PyArg_CheckPositional("unsigned_int_converter", nargs, 0, 3)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + if (!_PyLong_UnsignedInt_Converter(args[0], &a)) { + goto exit; + } + if (nargs < 2) { + goto skip_optional; + } + if (!_PyLong_UnsignedInt_Converter(args[1], &b)) { + goto exit; + } + if (nargs < 3) { + goto skip_optional; + } + c = (unsigned int)PyLong_AsUnsignedLongMask(args[2]); + if (c == (unsigned int)-1 && PyErr_Occurred()) { + goto exit; + } +skip_optional: + return_value = unsigned_int_converter_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(long_converter__doc__, +"long_converter($module, a=12, /)\n" +"--\n" +"\n"); + +#define LONG_CONVERTER_METHODDEF \ + {"long_converter", _PyCFunction_CAST(long_converter), METH_FASTCALL, long_converter__doc__}, + +static PyObject * +long_converter_impl(PyObject *module, long a); + +static PyObject * +long_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + long a = 12; + + if (!_PyArg_CheckPositional("long_converter", nargs, 0, 1)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + a = PyLong_AsLong(args[0]); + if (a == -1 && PyErr_Occurred()) { + goto exit; + } +skip_optional: + return_value = long_converter_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(unsigned_long_converter__doc__, +"unsigned_long_converter($module, a=12, b=34, c=56, /)\n" +"--\n" +"\n"); + +#define UNSIGNED_LONG_CONVERTER_METHODDEF \ + {"unsigned_long_converter", _PyCFunction_CAST(unsigned_long_converter), METH_FASTCALL, unsigned_long_converter__doc__}, + +static PyObject * +unsigned_long_converter_impl(PyObject *module, unsigned long a, + unsigned long b, unsigned long c); + +static PyObject * +unsigned_long_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + unsigned long a = 12; + unsigned long b = 34; + unsigned long c = 56; + + if (!_PyArg_CheckPositional("unsigned_long_converter", nargs, 0, 3)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + if (!_PyLong_UnsignedLong_Converter(args[0], &a)) { + goto exit; + } + if (nargs < 2) { + goto skip_optional; + } + if (!_PyLong_UnsignedLong_Converter(args[1], &b)) { + goto exit; + } + if (nargs < 3) { + goto 
skip_optional; + } + if (!PyLong_Check(args[2])) { + _PyArg_BadArgument("unsigned_long_converter", "argument 3", "int", args[2]); + goto exit; + } + c = PyLong_AsUnsignedLongMask(args[2]); +skip_optional: + return_value = unsigned_long_converter_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(long_long_converter__doc__, +"long_long_converter($module, a=12, /)\n" +"--\n" +"\n"); + +#define LONG_LONG_CONVERTER_METHODDEF \ + {"long_long_converter", _PyCFunction_CAST(long_long_converter), METH_FASTCALL, long_long_converter__doc__}, + +static PyObject * +long_long_converter_impl(PyObject *module, long long a); + +static PyObject * +long_long_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + long long a = 12; + + if (!_PyArg_CheckPositional("long_long_converter", nargs, 0, 1)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + a = PyLong_AsLongLong(args[0]); + if (a == -1 && PyErr_Occurred()) { + goto exit; + } +skip_optional: + return_value = long_long_converter_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(unsigned_long_long_converter__doc__, +"unsigned_long_long_converter($module, a=12, b=34, c=56, /)\n" +"--\n" +"\n"); + +#define UNSIGNED_LONG_LONG_CONVERTER_METHODDEF \ + {"unsigned_long_long_converter", _PyCFunction_CAST(unsigned_long_long_converter), METH_FASTCALL, unsigned_long_long_converter__doc__}, + +static PyObject * +unsigned_long_long_converter_impl(PyObject *module, unsigned long long a, + unsigned long long b, unsigned long long c); + +static PyObject * +unsigned_long_long_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + unsigned long long a = 12; + unsigned long long b = 34; + unsigned long long c = 56; + + if (!_PyArg_CheckPositional("unsigned_long_long_converter", nargs, 0, 3)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + if (!_PyLong_UnsignedLongLong_Converter(args[0], &a)) { + goto exit; + } + if (nargs < 2) { + goto skip_optional; + } + if (!_PyLong_UnsignedLongLong_Converter(args[1], &b)) { + goto exit; + } + if (nargs < 3) { + goto skip_optional; + } + if (!PyLong_Check(args[2])) { + _PyArg_BadArgument("unsigned_long_long_converter", "argument 3", "int", args[2]); + goto exit; + } + c = PyLong_AsUnsignedLongLongMask(args[2]); +skip_optional: + return_value = unsigned_long_long_converter_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(py_ssize_t_converter__doc__, +"py_ssize_t_converter($module, a=12, b=34, c=56, /)\n" +"--\n" +"\n"); + +#define PY_SSIZE_T_CONVERTER_METHODDEF \ + {"py_ssize_t_converter", _PyCFunction_CAST(py_ssize_t_converter), METH_FASTCALL, py_ssize_t_converter__doc__}, + +static PyObject * +py_ssize_t_converter_impl(PyObject *module, Py_ssize_t a, Py_ssize_t b, + Py_ssize_t c); + +static PyObject * +py_ssize_t_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + Py_ssize_t a = 12; + Py_ssize_t b = 34; + Py_ssize_t c = 56; + + if (!_PyArg_CheckPositional("py_ssize_t_converter", nargs, 0, 3)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + { + Py_ssize_t ival = -1; + PyObject *iobj = _PyNumber_Index(args[0]); + if (iobj != NULL) { + ival = PyLong_AsSsize_t(iobj); + Py_DECREF(iobj); + } + if (ival == -1 && PyErr_Occurred()) { + goto exit; + } + a = ival; + } + if (nargs < 2) { + goto skip_optional; + } + { + Py_ssize_t ival = -1; + PyObject *iobj = 
_PyNumber_Index(args[1]); + if (iobj != NULL) { + ival = PyLong_AsSsize_t(iobj); + Py_DECREF(iobj); + } + if (ival == -1 && PyErr_Occurred()) { + goto exit; + } + b = ival; + } + if (nargs < 3) { + goto skip_optional; + } + if (!_Py_convert_optional_to_ssize_t(args[2], &c)) { + goto exit; + } +skip_optional: + return_value = py_ssize_t_converter_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(slice_index_converter__doc__, +"slice_index_converter($module, a=12, b=34, c=56, /)\n" +"--\n" +"\n"); + +#define SLICE_INDEX_CONVERTER_METHODDEF \ + {"slice_index_converter", _PyCFunction_CAST(slice_index_converter), METH_FASTCALL, slice_index_converter__doc__}, + +static PyObject * +slice_index_converter_impl(PyObject *module, Py_ssize_t a, Py_ssize_t b, + Py_ssize_t c); + +static PyObject * +slice_index_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + Py_ssize_t a = 12; + Py_ssize_t b = 34; + Py_ssize_t c = 56; + + if (!_PyArg_CheckPositional("slice_index_converter", nargs, 0, 3)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + if (!_PyEval_SliceIndex(args[0], &a)) { + goto exit; + } + if (nargs < 2) { + goto skip_optional; + } + if (!_PyEval_SliceIndexNotNone(args[1], &b)) { + goto exit; + } + if (nargs < 3) { + goto skip_optional; + } + if (!_PyEval_SliceIndex(args[2], &c)) { + goto exit; + } +skip_optional: + return_value = slice_index_converter_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(size_t_converter__doc__, +"size_t_converter($module, a=12, /)\n" +"--\n" +"\n"); + +#define SIZE_T_CONVERTER_METHODDEF \ + {"size_t_converter", _PyCFunction_CAST(size_t_converter), METH_FASTCALL, size_t_converter__doc__}, + +static PyObject * +size_t_converter_impl(PyObject *module, size_t a); + +static PyObject * +size_t_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + size_t a = 12; + + if (!_PyArg_CheckPositional("size_t_converter", nargs, 0, 1)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + if (!_PyLong_Size_t_Converter(args[0], &a)) { + goto exit; + } +skip_optional: + return_value = size_t_converter_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(float_converter__doc__, +"float_converter($module, a=12.5, /)\n" +"--\n" +"\n"); + +#define FLOAT_CONVERTER_METHODDEF \ + {"float_converter", _PyCFunction_CAST(float_converter), METH_FASTCALL, float_converter__doc__}, + +static PyObject * +float_converter_impl(PyObject *module, float a); + +static PyObject * +float_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + float a = 12.5; + + if (!_PyArg_CheckPositional("float_converter", nargs, 0, 1)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + if (PyFloat_CheckExact(args[0])) { + a = (float) (PyFloat_AS_DOUBLE(args[0])); + } + else + { + a = (float) PyFloat_AsDouble(args[0]); + if (a == -1.0 && PyErr_Occurred()) { + goto exit; + } + } +skip_optional: + return_value = float_converter_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(double_converter__doc__, +"double_converter($module, a=12.5, /)\n" +"--\n" +"\n"); + +#define DOUBLE_CONVERTER_METHODDEF \ + {"double_converter", _PyCFunction_CAST(double_converter), METH_FASTCALL, double_converter__doc__}, + +static PyObject * +double_converter_impl(PyObject *module, double a); + +static PyObject * +double_converter(PyObject *module, PyObject *const 
*args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + double a = 12.5; + + if (!_PyArg_CheckPositional("double_converter", nargs, 0, 1)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + if (PyFloat_CheckExact(args[0])) { + a = PyFloat_AS_DOUBLE(args[0]); + } + else + { + a = PyFloat_AsDouble(args[0]); + if (a == -1.0 && PyErr_Occurred()) { + goto exit; + } + } +skip_optional: + return_value = double_converter_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(py_complex_converter__doc__, +"py_complex_converter($module, a, /)\n" +"--\n" +"\n"); + +#define PY_COMPLEX_CONVERTER_METHODDEF \ + {"py_complex_converter", (PyCFunction)py_complex_converter, METH_O, py_complex_converter__doc__}, + +static PyObject * +py_complex_converter_impl(PyObject *module, Py_complex a); + +static PyObject * +py_complex_converter(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + Py_complex a; + + a = PyComplex_AsCComplex(arg); + if (PyErr_Occurred()) { + goto exit; + } + return_value = py_complex_converter_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(str_converter__doc__, +"str_converter($module, a=\'a\', b=\'b\', c=\'c\', /)\n" +"--\n" +"\n"); + +#define STR_CONVERTER_METHODDEF \ + {"str_converter", _PyCFunction_CAST(str_converter), METH_FASTCALL, str_converter__doc__}, + +static PyObject * +str_converter_impl(PyObject *module, const char *a, const char *b, + const char *c, Py_ssize_t c_length); + +static PyObject * +str_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + const char *a = "a"; + const char *b = "b"; + const char *c = "c"; + Py_ssize_t c_length; + + if (!_PyArg_ParseStack(args, nargs, "|sys#:str_converter", + &a, &b, &c, &c_length)) { + goto exit; + } + return_value = str_converter_impl(module, a, b, c, c_length); + +exit: + return return_value; +} + +PyDoc_STRVAR(str_converter_encoding__doc__, +"str_converter_encoding($module, a, b, c, /)\n" +"--\n" +"\n"); + +#define STR_CONVERTER_ENCODING_METHODDEF \ + {"str_converter_encoding", _PyCFunction_CAST(str_converter_encoding), METH_FASTCALL, str_converter_encoding__doc__}, + +static PyObject * +str_converter_encoding_impl(PyObject *module, char *a, char *b, char *c, + Py_ssize_t c_length); + +static PyObject * +str_converter_encoding(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + char *a = NULL; + char *b = NULL; + char *c = NULL; + Py_ssize_t c_length; + + if (!_PyArg_ParseStack(args, nargs, "esetet#:str_converter_encoding", + "idna", &a, "idna", &b, "idna", &c, &c_length)) { + goto exit; + } + return_value = str_converter_encoding_impl(module, a, b, c, c_length); + /* Post parse cleanup for a */ + PyMem_FREE(a); + /* Post parse cleanup for b */ + PyMem_FREE(b); + /* Post parse cleanup for c */ + PyMem_FREE(c); + +exit: + return return_value; +} + +PyDoc_STRVAR(py_buffer_converter__doc__, +"py_buffer_converter($module, a, b, /)\n" +"--\n" +"\n"); + +#define PY_BUFFER_CONVERTER_METHODDEF \ + {"py_buffer_converter", _PyCFunction_CAST(py_buffer_converter), METH_FASTCALL, py_buffer_converter__doc__}, + +static PyObject * +py_buffer_converter_impl(PyObject *module, Py_buffer *a, Py_buffer *b); + +static PyObject * +py_buffer_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + Py_buffer a = {NULL, NULL}; + Py_buffer b = {NULL, NULL}; + + if (!_PyArg_ParseStack(args, nargs, "z*w*:py_buffer_converter", + &a, &b)) { 
+ goto exit; + } + return_value = py_buffer_converter_impl(module, &a, &b); + +exit: + /* Cleanup for a */ + if (a.obj) { + PyBuffer_Release(&a); + } + /* Cleanup for b */ + if (b.obj) { + PyBuffer_Release(&b); + } + + return return_value; +} + +PyDoc_STRVAR(keywords__doc__, +"keywords($module, /, a, b)\n" +"--\n" +"\n"); + +#define KEYWORDS_METHODDEF \ + {"keywords", _PyCFunction_CAST(keywords), METH_FASTCALL|METH_KEYWORDS, keywords__doc__}, + +static PyObject * +keywords_impl(PyObject *module, PyObject *a, PyObject *b); + +static PyObject * +keywords(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), &_Py_ID(b), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "keywords", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + PyObject *a; + PyObject *b; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + return_value = keywords_impl(module, a, b); + +exit: + return return_value; +} + +PyDoc_STRVAR(keywords_kwonly__doc__, +"keywords_kwonly($module, /, a, *, b)\n" +"--\n" +"\n"); + +#define KEYWORDS_KWONLY_METHODDEF \ + {"keywords_kwonly", _PyCFunction_CAST(keywords_kwonly), METH_FASTCALL|METH_KEYWORDS, keywords_kwonly__doc__}, + +static PyObject * +keywords_kwonly_impl(PyObject *module, PyObject *a, PyObject *b); + +static PyObject * +keywords_kwonly(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), &_Py_ID(b), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "keywords_kwonly", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + PyObject *a; + PyObject *b; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 1, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + return_value = keywords_kwonly_impl(module, a, b); + +exit: + return return_value; +} + +PyDoc_STRVAR(keywords_opt__doc__, +"keywords_opt($module, /, a, b=None, c=None)\n" +"--\n" +"\n"); + +#define KEYWORDS_OPT_METHODDEF \ + {"keywords_opt", _PyCFunction_CAST(keywords_opt), METH_FASTCALL|METH_KEYWORDS, keywords_opt__doc__}, + +static PyObject * +keywords_opt_impl(PyObject *module, PyObject *a, PyObject *b, PyObject *c); + +static PyObject * +keywords_opt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; 
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 3 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), &_Py_ID(b), &_Py_ID(c), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", "c", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "keywords_opt", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; + PyObject *a; + PyObject *b = Py_None; + PyObject *c = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 3, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + if (!noptargs) { + goto skip_optional_pos; + } + if (args[1]) { + b = args[1]; + if (!--noptargs) { + goto skip_optional_pos; + } + } + c = args[2]; +skip_optional_pos: + return_value = keywords_opt_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(keywords_opt_kwonly__doc__, +"keywords_opt_kwonly($module, /, a, b=None, *, c=None, d=None)\n" +"--\n" +"\n"); + +#define KEYWORDS_OPT_KWONLY_METHODDEF \ + {"keywords_opt_kwonly", _PyCFunction_CAST(keywords_opt_kwonly), METH_FASTCALL|METH_KEYWORDS, keywords_opt_kwonly__doc__}, + +static PyObject * +keywords_opt_kwonly_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d); + +static PyObject * +keywords_opt_kwonly(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 4 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), &_Py_ID(b), &_Py_ID(c), &_Py_ID(d), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", "c", "d", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "keywords_opt_kwonly", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[4]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; + PyObject *a; + PyObject *b = Py_None; + PyObject *c = Py_None; + PyObject *d = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 2, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + if (!noptargs) { + goto skip_optional_pos; + } + if (args[1]) { + b = args[1]; + if (!--noptargs) { + goto skip_optional_pos; + } + } +skip_optional_pos: + if (!noptargs) { + goto skip_optional_kwonly; + } + if (args[2]) { + c = args[2]; + if (!--noptargs) { + goto skip_optional_kwonly; + } + } + d = args[3]; +skip_optional_kwonly: + return_value = keywords_opt_kwonly_impl(module, a, b, c, d); + +exit: + return return_value; +} + +PyDoc_STRVAR(keywords_kwonly_opt__doc__, +"keywords_kwonly_opt($module, /, a, *, b=None, c=None)\n" +"--\n" +"\n"); + +#define KEYWORDS_KWONLY_OPT_METHODDEF \ + {"keywords_kwonly_opt", _PyCFunction_CAST(keywords_kwonly_opt), METH_FASTCALL|METH_KEYWORDS, keywords_kwonly_opt__doc__}, + +static PyObject * +keywords_kwonly_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c); + +static PyObject * +keywords_kwonly_opt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 3 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), &_Py_ID(b), &_Py_ID(c), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", "c", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "keywords_kwonly_opt", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; + PyObject *a; + PyObject *b = Py_None; + PyObject *c = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + if (!noptargs) { + goto skip_optional_kwonly; + } + if (args[1]) { + b = args[1]; + if (!--noptargs) { + goto skip_optional_kwonly; + } + } + c = args[2]; +skip_optional_kwonly: + return_value = keywords_kwonly_opt_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_keywords__doc__, +"posonly_keywords($module, a, /, b)\n" +"--\n" +"\n"); + +#define POSONLY_KEYWORDS_METHODDEF \ + {"posonly_keywords", _PyCFunction_CAST(posonly_keywords), METH_FASTCALL|METH_KEYWORDS, posonly_keywords__doc__}, + +static PyObject * +posonly_keywords_impl(PyObject *module, PyObject *a, PyObject *b); + +static PyObject * +posonly_keywords(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(b), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "b", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_keywords", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + PyObject *a; + PyObject *b; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + return_value = posonly_keywords_impl(module, a, b); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_kwonly__doc__, +"posonly_kwonly($module, a, /, *, b)\n" +"--\n" +"\n"); + +#define POSONLY_KWONLY_METHODDEF \ + {"posonly_kwonly", _PyCFunction_CAST(posonly_kwonly), METH_FASTCALL|METH_KEYWORDS, posonly_kwonly__doc__}, + +static PyObject * +posonly_kwonly_impl(PyObject *module, PyObject *a, PyObject *b); + +static PyObject * +posonly_kwonly(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(b), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "b", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_kwonly", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + PyObject *a; + PyObject *b; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 1, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + return_value = posonly_kwonly_impl(module, a, b); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_keywords_kwonly__doc__, +"posonly_keywords_kwonly($module, a, /, b, *, c)\n" +"--\n" +"\n"); + +#define POSONLY_KEYWORDS_KWONLY_METHODDEF \ + 
{"posonly_keywords_kwonly", _PyCFunction_CAST(posonly_keywords_kwonly), METH_FASTCALL|METH_KEYWORDS, posonly_keywords_kwonly__doc__}, + +static PyObject * +posonly_keywords_kwonly_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c); + +static PyObject * +posonly_keywords_kwonly(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(b), &_Py_ID(c), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "b", "c", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_keywords_kwonly", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + PyObject *a; + PyObject *b; + PyObject *c; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 1, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + c = args[2]; + return_value = posonly_keywords_kwonly_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_keywords_opt__doc__, +"posonly_keywords_opt($module, a, /, b, c=None, d=None)\n" +"--\n" +"\n"); + +#define POSONLY_KEYWORDS_OPT_METHODDEF \ + {"posonly_keywords_opt", _PyCFunction_CAST(posonly_keywords_opt), METH_FASTCALL|METH_KEYWORDS, posonly_keywords_opt__doc__}, + +static PyObject * +posonly_keywords_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d); + +static PyObject * +posonly_keywords_opt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 3 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(b), &_Py_ID(c), &_Py_ID(d), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "b", "c", "d", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_keywords_opt", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[4]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 2; + PyObject *a; + PyObject *b; + PyObject *c = Py_None; + PyObject *d = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 4, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + if (!noptargs) { + goto skip_optional_pos; + } + if (args[2]) { + c = args[2]; + if (!--noptargs) { + goto skip_optional_pos; + } + } + d = args[3]; +skip_optional_pos: + return_value = posonly_keywords_opt_impl(module, a, b, c, d); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_opt_keywords_opt__doc__, +"posonly_opt_keywords_opt($module, a, b=None, /, c=None, d=None)\n" +"--\n" +"\n"); + +#define POSONLY_OPT_KEYWORDS_OPT_METHODDEF \ + {"posonly_opt_keywords_opt", _PyCFunction_CAST(posonly_opt_keywords_opt), METH_FASTCALL|METH_KEYWORDS, posonly_opt_keywords_opt__doc__}, + +static PyObject * +posonly_opt_keywords_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d); + +static PyObject * +posonly_opt_keywords_opt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(c), &_Py_ID(d), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "", "c", "d", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_opt_keywords_opt", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[4]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; + PyObject *a; + PyObject *b = Py_None; + PyObject *c = Py_None; + PyObject *d = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 4, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + if (nargs < 2) { + goto skip_optional_posonly; + } + noptargs--; + b = args[1]; +skip_optional_posonly: + if (!noptargs) { + goto skip_optional_pos; + } + if (args[2]) { + c = args[2]; + if (!--noptargs) { + goto skip_optional_pos; + } + } + d = args[3]; +skip_optional_pos: + return_value = posonly_opt_keywords_opt_impl(module, a, b, c, d); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_kwonly_opt__doc__, +"posonly_kwonly_opt($module, a, /, *, b, c=None, d=None)\n" +"--\n" +"\n"); + +#define POSONLY_KWONLY_OPT_METHODDEF \ + {"posonly_kwonly_opt", _PyCFunction_CAST(posonly_kwonly_opt), METH_FASTCALL|METH_KEYWORDS, posonly_kwonly_opt__doc__}, + +static PyObject * +posonly_kwonly_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d); + +static PyObject * +posonly_kwonly_opt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 3 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(b), &_Py_ID(c), &_Py_ID(d), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "b", "c", "d", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_kwonly_opt", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[4]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 2; + PyObject *a; + PyObject *b; + PyObject *c = Py_None; + PyObject *d = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 1, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + if (!noptargs) { + goto skip_optional_kwonly; + } + if (args[2]) { + c = args[2]; + if (!--noptargs) { + goto skip_optional_kwonly; + } + } + d = args[3]; +skip_optional_kwonly: + return_value = posonly_kwonly_opt_impl(module, a, b, c, d); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_opt_kwonly_opt__doc__, +"posonly_opt_kwonly_opt($module, a, b=None, /, *, c=None, d=None)\n" +"--\n" +"\n"); + +#define POSONLY_OPT_KWONLY_OPT_METHODDEF \ + {"posonly_opt_kwonly_opt", _PyCFunction_CAST(posonly_opt_kwonly_opt), METH_FASTCALL|METH_KEYWORDS, posonly_opt_kwonly_opt__doc__}, + +static PyObject * +posonly_opt_kwonly_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d); + +static PyObject * +posonly_opt_kwonly_opt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(c), &_Py_ID(d), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "", "c", "d", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_opt_kwonly_opt", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[4]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; + PyObject *a; + PyObject *b = Py_None; + PyObject *c = Py_None; + PyObject *d = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 2, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + if (nargs < 2) { + goto skip_optional_posonly; + } + noptargs--; + b = args[1]; +skip_optional_posonly: + if (!noptargs) { + goto skip_optional_kwonly; + } + if (args[2]) { + c = args[2]; + if (!--noptargs) { + goto skip_optional_kwonly; + } + } + d = args[3]; +skip_optional_kwonly: + return_value = posonly_opt_kwonly_opt_impl(module, a, b, c, d); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_keywords_kwonly_opt__doc__, +"posonly_keywords_kwonly_opt($module, a, /, b, *, c, d=None, e=None)\n" +"--\n" +"\n"); + +#define POSONLY_KEYWORDS_KWONLY_OPT_METHODDEF \ + {"posonly_keywords_kwonly_opt", _PyCFunction_CAST(posonly_keywords_kwonly_opt), METH_FASTCALL|METH_KEYWORDS, posonly_keywords_kwonly_opt__doc__}, + +static PyObject * +posonly_keywords_kwonly_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d, PyObject *e); + +static PyObject * +posonly_keywords_kwonly_opt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 4 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(b), &_Py_ID(c), &_Py_ID(d), &_Py_ID(e), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "b", "c", "d", "e", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_keywords_kwonly_opt", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[5]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 3; + PyObject *a; + PyObject *b; + PyObject *c; + PyObject *d = Py_None; + PyObject *e = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 1, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + c = args[2]; + if (!noptargs) { + goto skip_optional_kwonly; + } + if (args[3]) { + d = args[3]; + if (!--noptargs) { + goto skip_optional_kwonly; + } + } + e = args[4]; +skip_optional_kwonly: + return_value = posonly_keywords_kwonly_opt_impl(module, a, b, c, d, e); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_keywords_opt_kwonly_opt__doc__, +"posonly_keywords_opt_kwonly_opt($module, a, /, b, c=None, *, d=None,\n" +" e=None)\n" +"--\n" +"\n"); + +#define POSONLY_KEYWORDS_OPT_KWONLY_OPT_METHODDEF \ + {"posonly_keywords_opt_kwonly_opt", _PyCFunction_CAST(posonly_keywords_opt_kwonly_opt), METH_FASTCALL|METH_KEYWORDS, posonly_keywords_opt_kwonly_opt__doc__}, + +static PyObject * +posonly_keywords_opt_kwonly_opt_impl(PyObject *module, PyObject *a, + PyObject *b, PyObject *c, PyObject *d, + PyObject *e); + +static PyObject * +posonly_keywords_opt_kwonly_opt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 4 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(b), &_Py_ID(c), &_Py_ID(d), &_Py_ID(e), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "b", "c", "d", "e", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_keywords_opt_kwonly_opt", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[5]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 2; + PyObject *a; + PyObject *b; + PyObject *c = Py_None; + PyObject *d = Py_None; + PyObject *e = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 3, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + if (!noptargs) { + goto skip_optional_pos; + } + if (args[2]) { + c = args[2]; + if (!--noptargs) { + goto skip_optional_pos; + } + } +skip_optional_pos: + if (!noptargs) { + goto skip_optional_kwonly; + } + if (args[3]) { + d = args[3]; + if (!--noptargs) { + goto skip_optional_kwonly; + } + } + e = args[4]; +skip_optional_kwonly: + return_value = posonly_keywords_opt_kwonly_opt_impl(module, a, b, c, d, e); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_opt_keywords_opt_kwonly_opt__doc__, +"posonly_opt_keywords_opt_kwonly_opt($module, a, b=None, /, c=None, *,\n" +" d=None)\n" +"--\n" +"\n"); + +#define POSONLY_OPT_KEYWORDS_OPT_KWONLY_OPT_METHODDEF \ + {"posonly_opt_keywords_opt_kwonly_opt", _PyCFunction_CAST(posonly_opt_keywords_opt_kwonly_opt), METH_FASTCALL|METH_KEYWORDS, posonly_opt_keywords_opt_kwonly_opt__doc__}, + +static PyObject * +posonly_opt_keywords_opt_kwonly_opt_impl(PyObject *module, PyObject *a, + PyObject *b, PyObject *c, + PyObject *d); + +static PyObject * +posonly_opt_keywords_opt_kwonly_opt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(c), &_Py_ID(d), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "", "c", "d", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_opt_keywords_opt_kwonly_opt", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[4]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; + PyObject *a; + PyObject *b = Py_None; + PyObject *c = Py_None; + PyObject *d = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 3, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + if (nargs < 2) { + goto skip_optional_posonly; + } + noptargs--; + b = args[1]; +skip_optional_posonly: + if (!noptargs) { + goto skip_optional_pos; + } + if (args[2]) { + c = args[2]; + if (!--noptargs) { + goto skip_optional_pos; + } + } +skip_optional_pos: + if (!noptargs) { + goto skip_optional_kwonly; + } + d = args[3]; +skip_optional_kwonly: + return_value = posonly_opt_keywords_opt_kwonly_opt_impl(module, a, b, c, d); + +exit: + return return_value; +} + +PyDoc_STRVAR(keyword_only_parameter__doc__, +"keyword_only_parameter($module, /, *, a)\n" +"--\n" +"\n"); + +#define KEYWORD_ONLY_PARAMETER_METHODDEF \ + {"keyword_only_parameter", _PyCFunction_CAST(keyword_only_parameter), METH_FASTCALL|METH_KEYWORDS, keyword_only_parameter__doc__}, + +static PyObject * +keyword_only_parameter_impl(PyObject *module, PyObject *a); + +static PyObject * +keyword_only_parameter(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "keyword_only_parameter", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + PyObject *a; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 0, 1, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + return_value = keyword_only_parameter_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_vararg__doc__, +"posonly_vararg($module, a, /, b, *args)\n" +"--\n" +"\n"); + +#define POSONLY_VARARG_METHODDEF \ + {"posonly_vararg", _PyCFunction_CAST(posonly_vararg), METH_FASTCALL|METH_KEYWORDS, posonly_vararg__doc__}, + +static PyObject * +posonly_vararg_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *args); + +static PyObject * +posonly_vararg(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(b), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "b", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_vararg", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + PyObject *a; + PyObject *b; + PyObject *__clinic_args = NULL; + + args = _PyArg_UnpackKeywordsWithVararg(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, 2, argsbuf); + if (!args) { + goto exit; 
+ } + a = args[0]; + b = args[1]; + __clinic_args = args[2]; + return_value = posonly_vararg_impl(module, a, b, __clinic_args); + +exit: + Py_XDECREF(__clinic_args); + return return_value; +} + +PyDoc_STRVAR(vararg_and_posonly__doc__, +"vararg_and_posonly($module, a, /, *args)\n" +"--\n" +"\n"); + +#define VARARG_AND_POSONLY_METHODDEF \ + {"vararg_and_posonly", _PyCFunction_CAST(vararg_and_posonly), METH_FASTCALL, vararg_and_posonly__doc__}, + +static PyObject * +vararg_and_posonly_impl(PyObject *module, PyObject *a, PyObject *args); + +static PyObject * +vararg_and_posonly(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *a; + PyObject *__clinic_args = NULL; + + if (!_PyArg_CheckPositional("vararg_and_posonly", nargs, 1, PY_SSIZE_T_MAX)) { + goto exit; + } + a = args[0]; + __clinic_args = PyTuple_New(nargs - 1); + for (Py_ssize_t i = 0; i < nargs - 1; ++i) { + PyTuple_SET_ITEM(__clinic_args, i, Py_NewRef(args[1 + i])); + } + return_value = vararg_and_posonly_impl(module, a, __clinic_args); + +exit: + Py_XDECREF(__clinic_args); + return return_value; +} + +PyDoc_STRVAR(vararg__doc__, +"vararg($module, /, a, *args)\n" +"--\n" +"\n"); + +#define VARARG_METHODDEF \ + {"vararg", _PyCFunction_CAST(vararg), METH_FASTCALL|METH_KEYWORDS, vararg__doc__}, + +static PyObject * +vararg_impl(PyObject *module, PyObject *a, PyObject *args); + +static PyObject * +vararg(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "vararg", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + PyObject *a; + PyObject *__clinic_args = NULL; + + args = _PyArg_UnpackKeywordsWithVararg(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, 1, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + __clinic_args = args[1]; + return_value = vararg_impl(module, a, __clinic_args); + +exit: + Py_XDECREF(__clinic_args); + return return_value; +} + +PyDoc_STRVAR(vararg_with_default__doc__, +"vararg_with_default($module, /, a, *args, b=False)\n" +"--\n" +"\n"); + +#define VARARG_WITH_DEFAULT_METHODDEF \ + {"vararg_with_default", _PyCFunction_CAST(vararg_with_default), METH_FASTCALL|METH_KEYWORDS, vararg_with_default__doc__}, + +static PyObject * +vararg_with_default_impl(PyObject *module, PyObject *a, PyObject *args, + int b); + +static PyObject * +vararg_with_default(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), &_Py_ID(b), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // 
!Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "vararg_with_default", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + Py_ssize_t noptargs = Py_MIN(nargs, 1) + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; + PyObject *a; + PyObject *__clinic_args = NULL; + int b = 0; + + args = _PyArg_UnpackKeywordsWithVararg(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, 1, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + __clinic_args = args[1]; + if (!noptargs) { + goto skip_optional_kwonly; + } + b = PyObject_IsTrue(args[2]); + if (b < 0) { + goto exit; + } +skip_optional_kwonly: + return_value = vararg_with_default_impl(module, a, __clinic_args, b); + +exit: + Py_XDECREF(__clinic_args); + return return_value; +} + +PyDoc_STRVAR(vararg_with_only_defaults__doc__, +"vararg_with_only_defaults($module, /, *args, b=None)\n" +"--\n" +"\n"); + +#define VARARG_WITH_ONLY_DEFAULTS_METHODDEF \ + {"vararg_with_only_defaults", _PyCFunction_CAST(vararg_with_only_defaults), METH_FASTCALL|METH_KEYWORDS, vararg_with_only_defaults__doc__}, + +static PyObject * +vararg_with_only_defaults_impl(PyObject *module, PyObject *args, PyObject *b); + +static PyObject * +vararg_with_only_defaults(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(b), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"b", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "vararg_with_only_defaults", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + Py_ssize_t noptargs = 0 + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 0; + PyObject *__clinic_args = NULL; + PyObject *b = Py_None; + + args = _PyArg_UnpackKeywordsWithVararg(args, nargs, NULL, kwnames, &_parser, 0, 0, 0, 0, argsbuf); + if (!args) { + goto exit; + } + __clinic_args = args[0]; + if (!noptargs) { + goto skip_optional_kwonly; + } + b = args[1]; +skip_optional_kwonly: + return_value = vararg_with_only_defaults_impl(module, __clinic_args, b); + +exit: + Py_XDECREF(__clinic_args); + return return_value; +} + +PyDoc_STRVAR(gh_32092_oob__doc__, +"gh_32092_oob($module, /, pos1, pos2, *varargs, kw1=None, kw2=None)\n" +"--\n" +"\n" +"Proof-of-concept of GH-32092 OOB bug."); + +#define GH_32092_OOB_METHODDEF \ + {"gh_32092_oob", _PyCFunction_CAST(gh_32092_oob), METH_FASTCALL|METH_KEYWORDS, gh_32092_oob__doc__}, + +static PyObject * +gh_32092_oob_impl(PyObject *module, PyObject *pos1, PyObject *pos2, + PyObject *varargs, PyObject *kw1, PyObject *kw2); + +static PyObject * +gh_32092_oob(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 4 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(pos1), &_Py_ID(pos2), &_Py_ID(kw1), &_Py_ID(kw2), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"pos1", "pos2", "kw1", "kw2", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "gh_32092_oob", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[5]; + Py_ssize_t noptargs = Py_MIN(nargs, 2) + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 2; + PyObject *pos1; + PyObject *pos2; + PyObject *varargs = NULL; + PyObject *kw1 = Py_None; + PyObject *kw2 = Py_None; + + args = _PyArg_UnpackKeywordsWithVararg(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, 2, argsbuf); + if (!args) { + goto exit; + } + pos1 = args[0]; + pos2 = args[1]; + varargs = args[2]; + if (!noptargs) { + goto skip_optional_kwonly; + } + if (args[3]) { + kw1 = args[3]; + if (!--noptargs) { + goto skip_optional_kwonly; + } + } + kw2 = args[4]; +skip_optional_kwonly: + return_value = gh_32092_oob_impl(module, pos1, pos2, varargs, kw1, kw2); + +exit: + Py_XDECREF(varargs); + return return_value; +} + +PyDoc_STRVAR(gh_32092_kw_pass__doc__, +"gh_32092_kw_pass($module, /, pos, *args, kw=None)\n" +"--\n" +"\n" +"Proof-of-concept of GH-32092 keyword args passing bug."); + +#define GH_32092_KW_PASS_METHODDEF \ + {"gh_32092_kw_pass", _PyCFunction_CAST(gh_32092_kw_pass), METH_FASTCALL|METH_KEYWORDS, gh_32092_kw_pass__doc__}, + +static PyObject * +gh_32092_kw_pass_impl(PyObject *module, PyObject *pos, PyObject *args, + PyObject *kw); + +static PyObject * +gh_32092_kw_pass(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(pos), &_Py_ID(kw), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"pos", "kw", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "gh_32092_kw_pass", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + Py_ssize_t noptargs = Py_MIN(nargs, 1) + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; + PyObject *pos; + PyObject *__clinic_args = NULL; + PyObject *kw = Py_None; + + args = _PyArg_UnpackKeywordsWithVararg(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, 1, argsbuf); + if (!args) { + goto exit; + } + pos = args[0]; + __clinic_args = args[1]; + if (!noptargs) { + goto skip_optional_kwonly; + } + kw = args[2]; +skip_optional_kwonly: + return_value = gh_32092_kw_pass_impl(module, pos, __clinic_args, kw); + +exit: + Py_XDECREF(__clinic_args); + return return_value; +} + +PyDoc_STRVAR(gh_99233_refcount__doc__, +"gh_99233_refcount($module, /, *args)\n" +"--\n" +"\n" +"Proof-of-concept of GH-99233 refcount error bug."); + +#define GH_99233_REFCOUNT_METHODDEF \ + {"gh_99233_refcount", _PyCFunction_CAST(gh_99233_refcount), METH_FASTCALL, gh_99233_refcount__doc__}, + +static PyObject * +gh_99233_refcount_impl(PyObject *module, PyObject *args); + +static PyObject * +gh_99233_refcount(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *__clinic_args = NULL; + + if (!_PyArg_CheckPositional("gh_99233_refcount", nargs, 0, PY_SSIZE_T_MAX)) { + goto exit; + } + __clinic_args = PyTuple_New(nargs - 0); + for (Py_ssize_t i = 0; i < nargs - 0; ++i) { + PyTuple_SET_ITEM(__clinic_args, i, Py_NewRef(args[0 + i])); + } + return_value = gh_99233_refcount_impl(module, __clinic_args); + +exit: + Py_XDECREF(__clinic_args); + return return_value; +} + +PyDoc_STRVAR(gh_99240_double_free__doc__, +"gh_99240_double_free($module, a, b, /)\n" +"--\n" +"\n" +"Proof-of-concept of GH-99240 double-free bug."); + +#define GH_99240_DOUBLE_FREE_METHODDEF \ + {"gh_99240_double_free", _PyCFunction_CAST(gh_99240_double_free), METH_FASTCALL, gh_99240_double_free__doc__}, + +static PyObject * +gh_99240_double_free_impl(PyObject *module, char *a, char *b); + +static PyObject * +gh_99240_double_free(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + char *a = NULL; + char *b = NULL; + + if (!_PyArg_ParseStack(args, nargs, "eses:gh_99240_double_free", + "idna", &a, "idna", &b)) { + goto exit; + } + return_value = gh_99240_double_free_impl(module, a, b); + /* Post parse cleanup for a */ + PyMem_FREE(a); + /* Post parse cleanup for b */ + PyMem_FREE(b); + +exit: + return return_value; +} +/*[clinic end generated code: output=9a5ca5909c087102 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_testinternalcapi.c.h b/Modules/clinic/_testinternalcapi.c.h index 8113fff37997af..e8d5681b194916 100644 --- a/Modules/clinic/_testinternalcapi.c.h +++ b/Modules/clinic/_testinternalcapi.c.h @@ -8,6 +8,69 @@ preserve #endif +PyDoc_STRVAR(_testinternalcapi_compiler_codegen__doc__, +"compiler_codegen($module, /, ast, filename, optimize)\n" +"--\n" +"\n" +"Apply compiler code generation to an AST."); + +#define _TESTINTERNALCAPI_COMPILER_CODEGEN_METHODDEF \ + {"compiler_codegen", _PyCFunction_CAST(_testinternalcapi_compiler_codegen), METH_FASTCALL|METH_KEYWORDS, _testinternalcapi_compiler_codegen__doc__}, + +static PyObject * +_testinternalcapi_compiler_codegen_impl(PyObject *module, PyObject *ast, + PyObject *filename, int optimize); + +static PyObject * +_testinternalcapi_compiler_codegen(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 3 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple 
= { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(ast), &_Py_ID(filename), &_Py_ID(optimize), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"ast", "filename", "optimize", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "compiler_codegen", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + PyObject *ast; + PyObject *filename; + int optimize; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 3, 3, 0, argsbuf); + if (!args) { + goto exit; + } + ast = args[0]; + filename = args[1]; + optimize = _PyLong_AsInt(args[2]); + if (optimize == -1 && PyErr_Occurred()) { + goto exit; + } + return_value = _testinternalcapi_compiler_codegen_impl(module, ast, filename, optimize); + +exit: + return return_value; +} + PyDoc_STRVAR(_testinternalcapi_optimize_cfg__doc__, "optimize_cfg($module, /, instructions, consts)\n" "--\n" @@ -65,4 +128,4 @@ _testinternalcapi_optimize_cfg(PyObject *module, PyObject *const *args, Py_ssize exit: return return_value; } -/*[clinic end generated code: output=3b1fd713290f68a9 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=efe95836482fd542 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_tkinter.c.h b/Modules/clinic/_tkinter.c.h index a251202f9bba2c..96c6ee26f426c3 100644 --- a/Modules/clinic/_tkinter.c.h +++ b/Modules/clinic/_tkinter.c.h @@ -747,29 +747,29 @@ _tkinter_create(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 4) { goto skip_optional; } - interactive = _PyLong_AsInt(args[3]); - if (interactive == -1 && PyErr_Occurred()) { + interactive = PyObject_IsTrue(args[3]); + if (interactive < 0) { goto exit; } if (nargs < 5) { goto skip_optional; } - wantobjects = _PyLong_AsInt(args[4]); - if (wantobjects == -1 && PyErr_Occurred()) { + wantobjects = PyObject_IsTrue(args[4]); + if (wantobjects < 0) { goto exit; } if (nargs < 6) { goto skip_optional; } - wantTk = _PyLong_AsInt(args[5]); - if (wantTk == -1 && PyErr_Occurred()) { + wantTk = PyObject_IsTrue(args[5]); + if (wantTk < 0) { goto exit; } if (nargs < 7) { goto skip_optional; } - sync = _PyLong_AsInt(args[6]); - if (sync == -1 && PyErr_Occurred()) { + sync = PyObject_IsTrue(args[6]); + if (sync < 0) { goto exit; } if (nargs < 8) { @@ -865,4 +865,4 @@ _tkinter_getbusywaitinterval(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef _TKINTER_TKAPP_DELETEFILEHANDLER_METHODDEF #define _TKINTER_TKAPP_DELETEFILEHANDLER_METHODDEF #endif /* !defined(_TKINTER_TKAPP_DELETEFILEHANDLER_METHODDEF) */ -/*[clinic end generated code: output=d022835d05fc8608 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=2a4e3bf8448604b5 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_winapi.c.h b/Modules/clinic/_winapi.c.h index cc1a5881e0bfd6..891b3f851d1243 100644 --- a/Modules/clinic/_winapi.c.h +++ b/Modules/clinic/_winapi.c.h @@ -133,7 +133,7 @@ _winapi_ConnectNamedPipe(PyObject *module, PyObject *const *args, Py_ssize_t nar static const char * const _keywords[] = {"handle", "overlapped", NULL}; static _PyArg_Parser _parser = { .keywords = _keywords, - .format = "" F_HANDLE "|i:ConnectNamedPipe", + .format = "" F_HANDLE "|p:ConnectNamedPipe", .kwtuple = KWTUPLE, }; #undef KWTUPLE @@ -742,6 +742,32 @@ _winapi_MapViewOfFile(PyObject *module, PyObject *const *args, Py_ssize_t nargs) return return_value; } 
+PyDoc_STRVAR(_winapi_UnmapViewOfFile__doc__, +"UnmapViewOfFile($module, address, /)\n" +"--\n" +"\n"); + +#define _WINAPI_UNMAPVIEWOFFILE_METHODDEF \ + {"UnmapViewOfFile", (PyCFunction)_winapi_UnmapViewOfFile, METH_O, _winapi_UnmapViewOfFile__doc__}, + +static PyObject * +_winapi_UnmapViewOfFile_impl(PyObject *module, LPCVOID address); + +static PyObject * +_winapi_UnmapViewOfFile(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + LPCVOID address; + + if (!PyArg_Parse(arg, "" F_POINTER ":UnmapViewOfFile", &address)) { + goto exit; + } + return_value = _winapi_UnmapViewOfFile_impl(module, address); + +exit: + return return_value; +} + PyDoc_STRVAR(_winapi_OpenFileMapping__doc__, "OpenFileMapping($module, desired_access, inherit_handle, name, /)\n" "--\n" @@ -946,7 +972,7 @@ _winapi_ReadFile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb static const char * const _keywords[] = {"handle", "size", "overlapped", NULL}; static _PyArg_Parser _parser = { .keywords = _keywords, - .format = "" F_HANDLE "k|i:ReadFile", + .format = "" F_HANDLE "k|p:ReadFile", .kwtuple = KWTUPLE, }; #undef KWTUPLE @@ -1194,7 +1220,7 @@ _winapi_WriteFile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyO static const char * const _keywords[] = {"handle", "buffer", "overlapped", NULL}; static _PyArg_Parser _parser = { .keywords = _keywords, - .format = "" F_HANDLE "O|i:WriteFile", + .format = "" F_HANDLE "O|p:WriteFile", .kwtuple = KWTUPLE, }; #undef KWTUPLE @@ -1345,4 +1371,4 @@ _winapi__mimetypes_read_windows_registry(PyObject *module, PyObject *const *args exit: return return_value; } -/*[clinic end generated code: output=83c4a3f0e70e7775 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=edb1a9d1bbfd6394 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/binascii.c.h b/Modules/clinic/binascii.c.h index 23ebdff2108258..63566dfb10e74f 100644 --- a/Modules/clinic/binascii.c.h +++ b/Modules/clinic/binascii.c.h @@ -99,8 +99,8 @@ binascii_b2a_uu(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj if (!noptargs) { goto skip_optional_kwonly; } - backtick = _PyLong_AsInt(args[1]); - if (backtick == -1 && PyErr_Occurred()) { + backtick = PyObject_IsTrue(args[1]); + if (backtick < 0) { goto exit; } skip_optional_kwonly: @@ -175,8 +175,8 @@ binascii_a2b_base64(PyObject *module, PyObject *const *args, Py_ssize_t nargs, P if (!noptargs) { goto skip_optional_kwonly; } - strict_mode = _PyLong_AsInt(args[1]); - if (strict_mode == -1 && PyErr_Occurred()) { + strict_mode = PyObject_IsTrue(args[1]); + if (strict_mode < 0) { goto exit; } skip_optional_kwonly: @@ -250,8 +250,8 @@ binascii_b2a_base64(PyObject *module, PyObject *const *args, Py_ssize_t nargs, P if (!noptargs) { goto skip_optional_kwonly; } - newline = _PyLong_AsInt(args[1]); - if (newline == -1 && PyErr_Occurred()) { + newline = PyObject_IsTrue(args[1]); + if (newline < 0) { goto exit; } skip_optional_kwonly: @@ -680,8 +680,8 @@ binascii_a2b_qp(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj if (!noptargs) { goto skip_optional_pos; } - header = _PyLong_AsInt(args[1]); - if (header == -1 && PyErr_Occurred()) { + header = PyObject_IsTrue(args[1]); + if (header < 0) { goto exit; } skip_optional_pos: @@ -763,8 +763,8 @@ binascii_b2a_qp(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj goto skip_optional_pos; } if (args[1]) { - quotetabs = _PyLong_AsInt(args[1]); - if (quotetabs == -1 && PyErr_Occurred()) { + quotetabs = PyObject_IsTrue(args[1]); + if (quotetabs < 0) { 
goto exit; } if (!--noptargs) { @@ -772,16 +772,16 @@ binascii_b2a_qp(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj } } if (args[2]) { - istext = _PyLong_AsInt(args[2]); - if (istext == -1 && PyErr_Occurred()) { + istext = PyObject_IsTrue(args[2]); + if (istext < 0) { goto exit; } if (!--noptargs) { goto skip_optional_pos; } } - header = _PyLong_AsInt(args[3]); - if (header == -1 && PyErr_Occurred()) { + header = PyObject_IsTrue(args[3]); + if (header < 0) { goto exit; } skip_optional_pos: @@ -795,4 +795,4 @@ binascii_b2a_qp(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj return return_value; } -/*[clinic end generated code: output=a266ba13c374aefa input=a9049054013a1b77]*/ +/*[clinic end generated code: output=ab156917c9db79d2 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/itertoolsmodule.c.h b/Modules/clinic/itertoolsmodule.c.h index 17f9ebb249390f..287de524e91307 100644 --- a/Modules/clinic/itertoolsmodule.c.h +++ b/Modules/clinic/itertoolsmodule.c.h @@ -12,19 +12,19 @@ PyDoc_STRVAR(batched_new__doc__, "batched(iterable, n)\n" "--\n" "\n" -"Batch data into lists of length n. The last batch may be shorter than n.\n" +"Batch data into tuples of length n. The last batch may be shorter than n.\n" "\n" -"Loops over the input iterable and accumulates data into lists\n" +"Loops over the input iterable and accumulates data into tuples\n" "up to size n. The input is consumed lazily, just enough to\n" -"fill a list. The result is yielded as soon as a batch is full\n" +"fill a batch. The result is yielded as soon as a batch is full\n" "or when the input iterable is exhausted.\n" "\n" " >>> for batch in batched(\'ABCDEFG\', 3):\n" " ... print(batch)\n" " ...\n" -" [\'A\', \'B\', \'C\']\n" -" [\'D\', \'E\', \'F\']\n" -" [\'G\']"); +" (\'A\', \'B\', \'C\')\n" +" (\'D\', \'E\', \'F\')\n" +" (\'G\',)"); static PyObject * batched_new_impl(PyTypeObject *type, PyObject *iterable, Py_ssize_t n); @@ -913,4 +913,4 @@ itertools_count(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=efea8cd1e647bd17 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=0229ebd72962f130 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index 1ad96ea296ea68..86251008b1bdae 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -3110,8 +3110,8 @@ os_posix_spawn(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje } } if (args[5]) { - resetids = _PyLong_AsInt(args[5]); - if (resetids == -1 && PyErr_Occurred()) { + resetids = PyObject_IsTrue(args[5]); + if (resetids < 0) { goto exit; } if (!--noptargs) { @@ -3119,8 +3119,8 @@ os_posix_spawn(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje } } if (args[6]) { - setsid = _PyLong_AsInt(args[6]); - if (setsid == -1 && PyErr_Occurred()) { + setsid = PyObject_IsTrue(args[6]); + if (setsid < 0) { goto exit; } if (!--noptargs) { @@ -3260,8 +3260,8 @@ os_posix_spawnp(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj } } if (args[5]) { - resetids = _PyLong_AsInt(args[5]); - if (resetids == -1 && PyErr_Occurred()) { + resetids = PyObject_IsTrue(args[5]); + if (resetids < 0) { goto exit; } if (!--noptargs) { @@ -3269,8 +3269,8 @@ os_posix_spawnp(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj } } if (args[6]) { - setsid = _PyLong_AsInt(args[6]); - if (setsid == -1 && PyErr_Occurred()) { + setsid = PyObject_IsTrue(args[6]); + if 
(setsid < 0) { goto exit; } if (!--noptargs) { @@ -10225,8 +10225,8 @@ os_set_blocking(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (fd == -1 && PyErr_Occurred()) { goto exit; } - blocking = _PyLong_AsInt(args[1]); - if (blocking == -1 && PyErr_Occurred()) { + blocking = PyObject_IsTrue(args[1]); + if (blocking < 0) { goto exit; } return_value = os_set_blocking_impl(module, fd, blocking); @@ -10269,6 +10269,38 @@ os_DirEntry_is_symlink(DirEntry *self, PyTypeObject *defining_class, PyObject *c return return_value; } +PyDoc_STRVAR(os_DirEntry_is_junction__doc__, +"is_junction($self, /)\n" +"--\n" +"\n" +"Return True if the entry is a junction; cached per entry."); + +#define OS_DIRENTRY_IS_JUNCTION_METHODDEF \ + {"is_junction", _PyCFunction_CAST(os_DirEntry_is_junction), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_junction__doc__}, + +static int +os_DirEntry_is_junction_impl(DirEntry *self, PyTypeObject *defining_class); + +static PyObject * +os_DirEntry_is_junction(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + int _return_value; + + if (nargs) { + PyErr_SetString(PyExc_TypeError, "is_junction() takes no arguments"); + goto exit; + } + _return_value = os_DirEntry_is_junction_impl(self, defining_class); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyBool_FromLong((long)_return_value); + +exit: + return return_value; +} + PyDoc_STRVAR(os_DirEntry_stat__doc__, "stat($self, /, *, follow_symlinks=True)\n" "--\n" @@ -11517,4 +11549,4 @@ os_waitstatus_to_exitcode(PyObject *module, PyObject *const *args, Py_ssize_t na #ifndef OS_WAITSTATUS_TO_EXITCODE_METHODDEF #define OS_WAITSTATUS_TO_EXITCODE_METHODDEF #endif /* !defined(OS_WAITSTATUS_TO_EXITCODE_METHODDEF) */ -/*[clinic end generated code: output=90f5e6995114e5ca input=a9049054013a1b77]*/ +/*[clinic end generated code: output=04fd23c89ab41f75 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/pyexpat.c.h b/Modules/clinic/pyexpat.c.h index 0454fbc9994504..34937c5d594f5c 100644 --- a/Modules/clinic/pyexpat.c.h +++ b/Modules/clinic/pyexpat.c.h @@ -52,8 +52,8 @@ pyexpat_xmlparser_Parse(xmlparseobject *self, PyTypeObject *cls, PyObject *const if (nargs < 2) { goto skip_optional_posonly; } - isfinal = _PyLong_AsInt(args[1]); - if (isfinal == -1 && PyErr_Occurred()) { + isfinal = PyObject_IsTrue(args[1]); + if (isfinal < 0) { goto exit; } skip_optional_posonly: @@ -498,4 +498,4 @@ pyexpat_ErrorString(PyObject *module, PyObject *arg) #ifndef PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF #define PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF #endif /* !defined(PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF) */ -/*[clinic end generated code: output=de5f664ef05ef34a input=a9049054013a1b77]*/ +/*[clinic end generated code: output=63efc62e24a7b5a7 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/signalmodule.c.h b/Modules/clinic/signalmodule.c.h index f0276c63df18f2..3b3c6ba150a1ad 100644 --- a/Modules/clinic/signalmodule.c.h +++ b/Modules/clinic/signalmodule.c.h @@ -211,8 +211,9 @@ PyDoc_STRVAR(signal_strsignal__doc__, "\n" "Return the system description of the given signal.\n" "\n" -"The return values can be such as \"Interrupt\", \"Segmentation fault\", etc.\n" -"Returns None if the signal is not recognized."); +"Returns the description of signal *signalnum*, such as \"Interrupt\"\n" +"for :const:`SIGINT`. Returns :const:`None` if *signalnum* has no\n" +"description. 
Raises :exc:`ValueError` if *signalnum* is invalid."); #define SIGNAL_STRSIGNAL_METHODDEF \ {"strsignal", (PyCFunction)signal_strsignal, METH_O, signal_strsignal__doc__}, @@ -704,4 +705,4 @@ signal_pidfd_send_signal(PyObject *module, PyObject *const *args, Py_ssize_t nar #ifndef SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF #define SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF #endif /* !defined(SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF) */ -/*[clinic end generated code: output=f2a3321b32b0637c input=a9049054013a1b77]*/ +/*[clinic end generated code: output=2b54dc607f6e3146 input=a9049054013a1b77]*/ diff --git a/Modules/faulthandler.c b/Modules/faulthandler.c index 2f6a2198ab3e32..5309a3728c5e07 100644 --- a/Modules/faulthandler.c +++ b/Modules/faulthandler.c @@ -18,12 +18,6 @@ # include <sys/resource.h> #endif -/* Using an alternative stack requires sigaltstack() - and sigaction() SA_ONSTACK */ -#if defined(HAVE_SIGALTSTACK) && defined(HAVE_SIGACTION) -# define FAULTHANDLER_USE_ALT_STACK -#endif - #if defined(FAULTHANDLER_USE_ALT_STACK) && defined(HAVE_LINUX_AUXVEC_H) && defined(HAVE_SYS_AUXV_H) # include <linux/auxvec.h> // AT_MINSIGSTKSZ # include <sys/auxv.h> // getauxval() @@ -32,13 +26,6 @@ /* Allocate at maximum 100 MiB of the stack to raise the stack overflow */ #define STACK_OVERFLOW_MAX_SIZE (100 * 1024 * 1024) -#ifndef MS_WINDOWS - /* register() is useless on Windows, because only SIGSEGV, SIGABRT and - SIGILL can be handled by the process, and these signals can only be used - with enable(), not using register() */ -# define FAULTHANDLER_USER -#endif - #define PUTS(fd, str) _Py_write_noraise(fd, str, strlen(str)) @@ -58,12 +45,6 @@ #endif -#ifdef HAVE_SIGACTION -typedef struct sigaction _Py_sighandler_t; -#else -typedef PyOS_sighandler_t _Py_sighandler_t; -#endif - typedef struct { int signum; int enabled; @@ -72,47 +53,12 @@ typedef struct { int all_threads; } fault_handler_t; -static struct { - int enabled; - PyObject *file; - int fd; - int all_threads; - PyInterpreterState *interp; -#ifdef MS_WINDOWS - void *exc_handler; -#endif -} fatal_error = {0, NULL, -1, 0}; - -static struct { - PyObject *file; - int fd; - PY_TIMEOUT_T timeout_us; /* timeout in microseconds */ - int repeat; - PyInterpreterState *interp; - int exit; - char *header; - size_t header_len; - /* The main thread always holds this lock. It is only released when - faulthandler_thread() is interrupted before this thread exits, or at - Python exit. 
*/ - PyThread_type_lock cancel_event; - /* released by child thread when joined */ - PyThread_type_lock running; -} thread; +#define fatal_error _PyRuntime.faulthandler.fatal_error +#define thread _PyRuntime.faulthandler.thread #ifdef FAULTHANDLER_USER -typedef struct { - int enabled; - PyObject *file; - int fd; - int all_threads; - int chain; - _Py_sighandler_t previous; - PyInterpreterState *interp; -} user_signal_t; - -static user_signal_t *user_signals; - +#define user_signals _PyRuntime.faulthandler.user_signals +typedef struct faulthandler_user_signal user_signal_t; static void faulthandler_user(int signum); #endif /* FAULTHANDLER_USER */ @@ -134,8 +80,8 @@ static const size_t faulthandler_nsignals = \ Py_ARRAY_LENGTH(faulthandler_handlers); #ifdef FAULTHANDLER_USE_ALT_STACK -static stack_t stack; -static stack_t old_stack; +# define stack _PyRuntime.faulthandler.stack +# define old_stack _PyRuntime.faulthandler.old_stack #endif @@ -270,7 +216,7 @@ faulthandler_dump_traceback_py(PyObject *self, int fd; if (!PyArg_ParseTupleAndKeywords(args, kwargs, - "|Oi:dump_traceback", kwlist, + "|Op:dump_traceback", kwlist, &file, &all_threads)) return NULL; @@ -333,14 +279,17 @@ faulthandler_fatal_error(int signum) size_t i; fault_handler_t *handler = NULL; int save_errno = errno; + int found = 0; if (!fatal_error.enabled) return; for (i=0; i < faulthandler_nsignals; i++) { handler = &faulthandler_handlers[i]; - if (handler->signum == signum) + if (handler->signum == signum) { + found = 1; break; + } } if (handler == NULL) { /* faulthandler_nsignals == 0 (unlikely) */ @@ -350,9 +299,18 @@ faulthandler_fatal_error(int signum) /* restore the previous handler */ faulthandler_disable_fatal_handler(handler); - PUTS(fd, "Fatal Python error: "); - PUTS(fd, handler->name); - PUTS(fd, "\n\n"); + if (found) { + PUTS(fd, "Fatal Python error: "); + PUTS(fd, handler->name); + PUTS(fd, "\n\n"); + } + else { + char unknown_signum[23] = {0,}; + snprintf(unknown_signum, 23, "%d", signum); + PUTS(fd, "Fatal Python error from unexpected signum: "); + PUTS(fd, unknown_signum); + PUTS(fd, "\n\n"); + } faulthandler_dump_traceback(fd, fatal_error.all_threads, fatal_error.interp); @@ -534,7 +492,7 @@ faulthandler_py_enable(PyObject *self, PyObject *args, PyObject *kwargs) PyThreadState *tstate; if (!PyArg_ParseTupleAndKeywords(args, kwargs, - "|Oi:enable", kwlist, &file, &all_threads)) + "|Op:enable", kwlist, &file, &all_threads)) return NULL; fd = faulthandler_get_fileno(&file); @@ -904,7 +862,7 @@ faulthandler_register_py(PyObject *self, int err; if (!PyArg_ParseTupleAndKeywords(args, kwargs, - "i|Oii:register", kwlist, + "i|Opp:register", kwlist, &signum, &file, &all_threads, &chain)) return NULL; @@ -1082,7 +1040,7 @@ faulthandler_fatal_error_thread(void *plock) static PyObject * faulthandler_fatal_error_c_thread(PyObject *self, PyObject *args) { - long thread; + long tid; PyThread_type_lock lock; faulthandler_suppress_crash_report(); @@ -1093,8 +1051,8 @@ faulthandler_fatal_error_c_thread(PyObject *self, PyObject *args) PyThread_acquire_lock(lock, WAIT_LOCK); - thread = PyThread_start_new_thread(faulthandler_fatal_error_thread, lock); - if (thread == -1) { + tid = PyThread_start_new_thread(faulthandler_fatal_error_thread, lock); + if (tid == -1) { PyThread_free_lock(lock); PyErr_SetString(PyExc_RuntimeError, "unable to start the thread"); return NULL; diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index 75832e9dd3da63..6630faa6f4471d 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -1870,8 +1870,7 
@@ gc_is_tracked(PyObject *module, PyObject *obj) result = Py_True; else result = Py_False; - Py_INCREF(result); - return result; + return Py_NewRef(result); } /*[clinic input] @@ -2330,7 +2329,6 @@ _PyObject_GC_New(PyTypeObject *tp) PyVarObject * _PyObject_GC_NewVar(PyTypeObject *tp, Py_ssize_t nitems) { - size_t size; PyVarObject *op; if (nitems < 0) { @@ -2338,7 +2336,7 @@ _PyObject_GC_NewVar(PyTypeObject *tp, Py_ssize_t nitems) return NULL; } size_t presize = _PyType_PreHeaderSize(tp); - size = _PyObject_VAR_SIZE(tp, nitems); + size_t size = _PyObject_VAR_SIZE(tp, nitems); op = (PyVarObject *)gc_alloc(size, presize); if (op == NULL) { return NULL; @@ -2352,7 +2350,7 @@ _PyObject_GC_Resize(PyVarObject *op, Py_ssize_t nitems) { const size_t basicsize = _PyObject_VAR_SIZE(Py_TYPE(op), nitems); _PyObject_ASSERT((PyObject *)op, !_PyObject_GC_IS_TRACKED(op)); - if (basicsize > PY_SSIZE_T_MAX - sizeof(PyGC_Head)) { + if (basicsize > (size_t)PY_SSIZE_T_MAX - sizeof(PyGC_Head)) { return (PyVarObject *)PyErr_NoMemory(); } diff --git a/Modules/getbuildinfo.c b/Modules/getbuildinfo.c index 7cb7397a22c8ab..a24750b76c09bc 100644 --- a/Modules/getbuildinfo.c +++ b/Modules/getbuildinfo.c @@ -31,12 +31,18 @@ #define GITBRANCH "" #endif +static int initialized = 0; +static char buildinfo[50 + sizeof(GITVERSION) + + ((sizeof(GITTAG) > sizeof(GITBRANCH)) ? + sizeof(GITTAG) : sizeof(GITBRANCH))]; + const char * Py_GetBuildInfo(void) { - static char buildinfo[50 + sizeof(GITVERSION) + - ((sizeof(GITTAG) > sizeof(GITBRANCH)) ? - sizeof(GITTAG) : sizeof(GITBRANCH))]; + if (initialized) { + return buildinfo; + } + initialized = 1; const char *revision = _Py_gitversion(); const char *sep = *revision ? ":" : ""; const char *gitid = _Py_gitidentifier(); diff --git a/Modules/getpath.c b/Modules/getpath.c index ceacf36d8968a1..13db010649fed8 100644 --- a/Modules/getpath.c +++ b/Modules/getpath.c @@ -125,8 +125,7 @@ getpath_isabs(PyObject *Py_UNUSED(self), PyObject *args) r = _Py_isabs(path) ? 
Py_True : Py_False; PyMem_Free((void *)path); } - Py_XINCREF(r); - return r; + return Py_XNewRef(r); } @@ -153,11 +152,10 @@ getpath_hassuffix(PyObject *Py_UNUSED(self), PyObject *args) wcscmp(&path[len - suffixLen], suffix) != 0 #endif ) { - r = Py_False; + r = Py_NewRef(Py_False); } else { - r = Py_True; + r = Py_NewRef(Py_True); } - Py_INCREF(r); PyMem_Free((void *)suffix); } PyMem_Free((void *)path); @@ -187,8 +185,7 @@ getpath_isdir(PyObject *Py_UNUSED(self), PyObject *args) #endif PyMem_Free((void *)path); } - Py_XINCREF(r); - return r; + return Py_XNewRef(r); } @@ -213,8 +210,7 @@ getpath_isfile(PyObject *Py_UNUSED(self), PyObject *args) #endif PyMem_Free((void *)path); } - Py_XINCREF(r); - return r; + return Py_XNewRef(r); } @@ -247,8 +243,7 @@ getpath_isxfile(PyObject *Py_UNUSED(self), PyObject *args) #endif PyMem_Free((void *)path); } - Py_XINCREF(r); - return r; + return Py_XNewRef(r); } @@ -488,8 +483,7 @@ getpath_realpath(PyObject *Py_UNUSED(self) , PyObject *args) goto done; } if (!S_ISLNK(st.st_mode)) { - Py_INCREF(pathobj); - r = pathobj; + r = Py_NewRef(pathobj); goto done; } wchar_t resolved[MAXPATHLEN+1]; @@ -504,8 +498,7 @@ getpath_realpath(PyObject *Py_UNUSED(self) , PyObject *args) return r; #endif - Py_INCREF(pathobj); - return pathobj; + return Py_NewRef(pathobj); } @@ -591,8 +584,7 @@ wchar_to_dict(PyObject *dict, const char *key, const wchar_t *s) return 0; } } else { - u = Py_None; - Py_INCREF(u); + u = Py_NewRef(Py_None); } r = PyDict_SetItemString(dict, key, u) == 0; Py_DECREF(u); @@ -617,8 +609,7 @@ decode_to_dict(PyObject *dict, const char *key, const char *s) return 0; } } else { - u = Py_None; - Py_INCREF(u); + u = Py_NewRef(Py_None); } r = PyDict_SetItemString(dict, key, u) == 0; Py_DECREF(u); diff --git a/Modules/getpath.py b/Modules/getpath.py index e3558bc49e389f..ab0d2dc0636ad4 100644 --- a/Modules/getpath.py +++ b/Modules/getpath.py @@ -375,6 +375,25 @@ def search_up(prefix, *landmarks, test=isfile): pass if not base_executable: base_executable = joinpath(executable_dir, basename(executable)) + # It's possible "python" is executed from within a posix venv but that + # "python" is not available in the "home" directory as the standard + # `make install` does not create it and distros often do not provide it. + # + # In this case, try to fall back to known alternatives + if os_name != 'nt' and not isfile(base_executable): + base_exe = basename(executable) + for candidate in (DEFAULT_PROGRAM_NAME, f'python{VERSION_MAJOR}.{VERSION_MINOR}'): + candidate += EXE_SUFFIX if EXE_SUFFIX else '' + if base_exe == candidate: + continue + candidate = joinpath(executable_dir, candidate) + # Only set base_executable if the candidate exists. + # If no candidate succeeds, subsequent errors related to + # base_executable (like FileNotFoundError) remain in the + # context of the original executable name + if isfile(candidate): + base_executable = candidate + break break else: venv_prefix = None @@ -579,15 +598,28 @@ def search_up(prefix, *landmarks, test=isfile): # Detect exec_prefix by searching from executable for the platstdlib_dir if PLATSTDLIB_LANDMARK and not exec_prefix: if executable_dir: - exec_prefix = search_up(executable_dir, PLATSTDLIB_LANDMARK, test=isdir) - if not exec_prefix: - if EXEC_PREFIX: - exec_prefix = EXEC_PREFIX - if not isdir(joinpath(exec_prefix, PLATSTDLIB_LANDMARK)): - warn('Could not find platform dependent libraries <exec_prefix>') + if os_name == 'nt': + # QUIRK: For compatibility and security, do not search for DLLs + # directory. 
The fallback below will cover it + exec_prefix = executable_dir + else: + exec_prefix = search_up(executable_dir, PLATSTDLIB_LANDMARK, test=isdir) + if not exec_prefix and EXEC_PREFIX: + exec_prefix = EXEC_PREFIX + if not exec_prefix or not isdir(joinpath(exec_prefix, PLATSTDLIB_LANDMARK)): + if os_name == 'nt': + # QUIRK: If DLLs is missing on Windows, don't warn, just assume + # that it's all the same as prefix. + # gh-98790: We set platstdlib_dir here to avoid adding "DLLs" into + # sys.path when it doesn't exist, which would give site-packages + # precedence over executable_dir, which is *probably* where our PYDs + # live. Ideally, whoever changes our layout will tell us what the + # layout is, but in the past this worked, so it should keep working. + platstdlib_dir = exec_prefix = prefix else: warn('Could not find platform dependent libraries <exec_prefix>') + # Fallback: assume exec_prefix == prefix if not exec_prefix: exec_prefix = prefix @@ -647,9 +679,8 @@ def search_up(prefix, *landmarks, test=isfile): else: library_dir = executable_dir pythonpath.append(joinpath(library_dir, ZIP_LANDMARK)) - elif build_prefix or venv_prefix: + elif build_prefix: # QUIRK: POSIX uses the default prefix when in the build directory - # or a venv pythonpath.append(joinpath(PREFIX, ZIP_LANDMARK)) else: pythonpath.append(joinpath(prefix, ZIP_LANDMARK)) @@ -689,7 +720,8 @@ def search_up(prefix, *landmarks, test=isfile): pythonpath.append(platstdlib_dir) if stdlib_dir: pythonpath.append(stdlib_dir) - pythonpath.append(executable_dir) + if executable_dir not in pythonpath: + pythonpath.append(executable_dir) else: if stdlib_dir: pythonpath.append(stdlib_dir) diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c index 381ec3b31d525e..c1f1e7320db719 100644 --- a/Modules/itertoolsmodule.c +++ b/Modules/itertoolsmodule.c @@ -56,11 +56,13 @@ static PyTypeObject pairwise_type; /* batched object ************************************************************/ /* Note: The built-in zip() function includes a "strict" argument - that is needed because that function can silently truncate data - and there is no easy way for a user to detect that condition. - The same reasoning does not apply to batched() which never drops - data. Instead, it produces a shorter list which can be handled - as the user sees fit. + that was needed because that function would silently truncate data, + and there was no easy way for a user to detect the data loss. + The same reasoning does not apply to batched() which never drops data. + Instead, batched() produces a shorter tuple which can be handled + as the user sees fit. If requested, it would be reasonable to add + "fillvalue" support which had demonstrated value in zip_longest(). + For now, the API is kept simple and clean. */ typedef struct { @@ -74,25 +76,25 @@ typedef struct { itertools.batched.__new__ as batched_new iterable: object n: Py_ssize_t -Batch data into lists of length n. The last batch may be shorter than n. +Batch data into tuples of length n. The last batch may be shorter than n. -Loops over the input iterable and accumulates data into lists +Loops over the input iterable and accumulates data into tuples up to size n. The input is consumed lazily, just enough to -fill a list. The result is yielded as soon as a batch is full +fill a batch. The result is yielded as soon as a batch is full or when the input iterable is exhausted. >>> for batch in batched('ABCDEFG', 3): ... print(batch) ... 
- ['A', 'B', 'C'] - ['D', 'E', 'F'] - ['G'] + ('A', 'B', 'C') + ('D', 'E', 'F') + ('G',) [clinic start generated code]*/ static PyObject * batched_new_impl(PyTypeObject *type, PyObject *iterable, Py_ssize_t n) -/*[clinic end generated code: output=7ebc954d655371b6 input=f28fd12cb52365f0]*/ +/*[clinic end generated code: output=7ebc954d655371b6 input=ffd70726927c5129]*/ { PyObject *it; batchedobject *bo; @@ -150,12 +152,12 @@ batched_next(batchedobject *bo) if (it == NULL) { return NULL; } - result = PyList_New(n); + result = PyTuple_New(n); if (result == NULL) { return NULL; } iternextfunc iternext = *Py_TYPE(it)->tp_iternext; - PyObject **items = _PyList_ITEMS(result); + PyObject **items = _PyTuple_ITEMS(result); for (i=0 ; i < n ; i++) { item = iternext(it); if (item == NULL) { @@ -180,8 +182,7 @@ batched_next(batchedobject *bo) Py_DECREF(result); return NULL; } - /* Elements in result[i:] are still NULL */ - Py_SET_SIZE(result, i); + _PyTuple_Resize(&result, i); return result; } @@ -401,8 +402,7 @@ itertools_groupby_impl(PyTypeObject *type, PyObject *it, PyObject *keyfunc) gbo->tgtkey = NULL; gbo->currkey = NULL; gbo->currvalue = NULL; - gbo->keyfunc = keyfunc; - Py_INCREF(keyfunc); + gbo->keyfunc = Py_NewRef(keyfunc); gbo->it = PyObject_GetIter(it); if (gbo->it == NULL) { Py_DECREF(gbo); @@ -444,8 +444,7 @@ groupby_step(groupbyobject *gbo) return -1; if (gbo->keyfunc == Py_None) { - newkey = newvalue; - Py_INCREF(newvalue); + newkey = Py_NewRef(newvalue); } else { newkey = PyObject_CallOneArg(gbo->keyfunc, newvalue); if (newkey == NULL) { @@ -625,10 +624,8 @@ _grouper_create(groupbyobject *parent, PyObject *tgtkey) igo = PyObject_GC_New(_grouperobject, &_grouper_type); if (igo == NULL) return NULL; - igo->parent = (PyObject *)parent; - Py_INCREF(parent); - igo->tgtkey = tgtkey; - Py_INCREF(tgtkey); + igo->parent = Py_NewRef(parent); + igo->tgtkey = Py_NewRef(tgtkey); parent->currgrouper = igo; /* borrowed reference */ PyObject_GC_Track(igo); @@ -779,8 +776,7 @@ teedataobject_newinternal(PyObject *it) tdo->running = 0; tdo->numread = 0; tdo->nextlink = NULL; - Py_INCREF(it); - tdo->it = it; + tdo->it = Py_NewRef(it); PyObject_GC_Track(tdo); return (PyObject *)tdo; } @@ -790,8 +786,7 @@ teedataobject_jumplink(teedataobject *tdo) { if (tdo->nextlink == NULL) tdo->nextlink = teedataobject_newinternal(tdo->it); - Py_XINCREF(tdo->nextlink); - return tdo->nextlink; + return Py_XNewRef(tdo->nextlink); } static PyObject * @@ -818,8 +813,7 @@ teedataobject_getitem(teedataobject *tdo, int i) tdo->numread++; tdo->values[i] = value; } - Py_INCREF(value); - return value; + return Py_NewRef(value); } static int @@ -841,8 +835,7 @@ teedataobject_safe_decref(PyObject *obj) Py_REFCNT(obj) == 1) { PyObject *nextlink = ((teedataobject *)obj)->nextlink; ((teedataobject *)obj)->nextlink = NULL; - Py_DECREF(obj); - obj = nextlink; + Py_SETREF(obj, nextlink); } Py_XDECREF(obj); } @@ -927,8 +920,7 @@ itertools_teedataobject_impl(PyTypeObject *type, PyObject *it, if (!Py_IS_TYPE(next, &teedataobject_type)) goto err; assert(tdo->nextlink == NULL); - Py_INCREF(next); - tdo->nextlink = next; + tdo->nextlink = Py_NewRef(next); } } else { if (next != Py_None) @@ -1026,8 +1018,7 @@ tee_copy(teeobject *to, PyObject *Py_UNUSED(ignored)) newto = PyObject_GC_New(teeobject, &tee_type); if (newto == NULL) return NULL; - Py_INCREF(to->dataobj); - newto->dataobj = to->dataobj; + newto->dataobj = (teedataobject*)Py_NewRef(to->dataobj); newto->index = to->index; newto->weakreflist = NULL; PyObject_GC_Track(newto); @@ -1343,8 
+1334,7 @@ cycle_next(cycleobject *lz) lz->index++; if (lz->index >= PyList_GET_SIZE(lz->saved)) lz->index = 0; - Py_INCREF(item); - return item; + return Py_NewRef(item); } static PyObject * @@ -1379,6 +1369,7 @@ cycle_setstate(cycleobject *lz, PyObject *state) PyErr_SetString(PyExc_TypeError, "state is not a tuple"); return NULL; } + // The second item can be 1/0 in old pickles and True/False in new pickles if (!PyArg_ParseTuple(state, "O!i", &PyList_Type, &saved, &firstpass)) { return NULL; } @@ -1480,8 +1471,7 @@ itertools_dropwhile_impl(PyTypeObject *type, PyObject *func, PyObject *seq) Py_DECREF(it); return NULL; } - Py_INCREF(func); - lz->func = func; + lz->func = Py_NewRef(func); lz->it = it; lz->start = 0; @@ -1643,8 +1633,7 @@ itertools_takewhile_impl(PyTypeObject *type, PyObject *func, PyObject *seq) Py_DECREF(it); return NULL; } - Py_INCREF(func); - lz->func = func; + lz->func = Py_NewRef(func); lz->it = it; lz->stop = 0; @@ -1940,8 +1929,7 @@ islice_reduce(isliceobject *lz, PyObject *Py_UNUSED(ignored)) return Py_BuildValue("O(Nn)n", Py_TYPE(lz), empty_it, 0, 0); } if (lz->stop == -1) { - stop = Py_None; - Py_INCREF(stop); + stop = Py_NewRef(Py_None); } else { stop = PyLong_FromSsize_t(lz->stop); if (stop == NULL) @@ -2062,8 +2050,7 @@ itertools_starmap_impl(PyTypeObject *type, PyObject *func, PyObject *seq) Py_DECREF(it); return NULL; } - Py_INCREF(func); - lz->func = func; + lz->func = Py_NewRef(func); lz->it = it; return (PyObject *)lz; @@ -2499,11 +2486,9 @@ product_dealloc(productobject *lz) static PyObject * product_sizeof(productobject *lz, void *unused) { - Py_ssize_t res; - - res = _PyObject_SIZE(Py_TYPE(lz)); - res += PyTuple_GET_SIZE(lz->pools) * sizeof(Py_ssize_t); - return PyLong_FromSsize_t(res); + size_t res = _PyObject_SIZE(Py_TYPE(lz)); + res += (size_t)PyTuple_GET_SIZE(lz->pools) * sizeof(Py_ssize_t); + return PyLong_FromSize_t(res); } PyDoc_STRVAR(sizeof_doc, "Returns size in memory, in bytes."); @@ -2595,8 +2580,7 @@ product_next(productobject *lz) goto empty; } - Py_INCREF(result); - return result; + return Py_NewRef(result); empty: lz->stopped = 1; @@ -2833,11 +2817,9 @@ combinations_dealloc(combinationsobject *co) static PyObject * combinations_sizeof(combinationsobject *co, void *unused) { - Py_ssize_t res; - - res = _PyObject_SIZE(Py_TYPE(co)); - res += co->r * sizeof(Py_ssize_t); - return PyLong_FromSsize_t(res); + size_t res = _PyObject_SIZE(Py_TYPE(co)); + res += (size_t)co->r * sizeof(Py_ssize_t); + return PyLong_FromSize_t(res); } static int @@ -2926,8 +2908,7 @@ combinations_next(combinationsobject *co) } } - Py_INCREF(result); - return result; + return Py_NewRef(result); empty: co->stopped = 1; @@ -3170,11 +3151,9 @@ cwr_dealloc(cwrobject *co) static PyObject * cwr_sizeof(cwrobject *co, void *unused) { - Py_ssize_t res; - - res = _PyObject_SIZE(Py_TYPE(co)); - res += co->r * sizeof(Py_ssize_t); - return PyLong_FromSsize_t(res); + size_t res = _PyObject_SIZE(Py_TYPE(co)); + res += (size_t)co->r * sizeof(Py_ssize_t); + return PyLong_FromSize_t(res); } static int @@ -3257,8 +3236,7 @@ cwr_next(cwrobject *co) } } - Py_INCREF(result); - return result; + return Py_NewRef(result); empty: co->stopped = 1; @@ -3516,12 +3494,10 @@ permutations_dealloc(permutationsobject *po) static PyObject * permutations_sizeof(permutationsobject *po, void *unused) { - Py_ssize_t res; - - res = _PyObject_SIZE(Py_TYPE(po)); - res += PyTuple_GET_SIZE(po->pool) * sizeof(Py_ssize_t); - res += po->r * sizeof(Py_ssize_t); - return PyLong_FromSsize_t(res); + size_t res = 
_PyObject_SIZE(Py_TYPE(po)); + res += (size_t)PyTuple_GET_SIZE(po->pool) * sizeof(Py_ssize_t); + res += (size_t)po->r * sizeof(Py_ssize_t); + return PyLong_FromSize_t(res); } static int @@ -3615,8 +3591,7 @@ permutations_next(permutationsobject *po) if (i < 0) goto empty; } - Py_INCREF(result); - return result; + return Py_NewRef(result); empty: po->stopped = 1; @@ -3819,13 +3794,11 @@ itertools_accumulate_impl(PyTypeObject *type, PyObject *iterable, } if (binop != Py_None) { - Py_XINCREF(binop); - lz->binop = binop; + lz->binop = Py_XNewRef(binop); } lz->total = NULL; lz->it = it; - Py_XINCREF(initial); - lz->initial = initial; + lz->initial = Py_XNewRef(initial); return (PyObject *)lz; } @@ -3857,18 +3830,15 @@ accumulate_next(accumulateobject *lz) if (lz->initial != Py_None) { lz->total = lz->initial; - Py_INCREF(Py_None); - lz->initial = Py_None; - Py_INCREF(lz->total); - return lz->total; + lz->initial = Py_NewRef(Py_None); + return Py_NewRef(lz->total); } val = (*Py_TYPE(lz->it)->tp_iternext)(lz->it); if (val == NULL) return NULL; if (lz->total == NULL) { - Py_INCREF(val); - lz->total = val; + lz->total = Py_NewRef(val); return lz->total; } @@ -4186,8 +4156,7 @@ itertools_filterfalse_impl(PyTypeObject *type, PyObject *func, PyObject *seq) Py_DECREF(it); return NULL; } - Py_INCREF(func); - lz->func = func; + lz->func = Py_NewRef(func); lz->it = it; return (PyObject *)lz; @@ -4581,8 +4550,7 @@ repeat_new(PyTypeObject *type, PyObject *args, PyObject *kwds) ro = (repeatobject *)type->tp_alloc(type, 0); if (ro == NULL) return NULL; - Py_INCREF(element); - ro->element = element; + ro->element = Py_NewRef(element); ro->cnt = cnt; return (PyObject *)ro; } @@ -4609,8 +4577,7 @@ repeat_next(repeatobject *ro) return NULL; if (ro->cnt > 0) ro->cnt--; - Py_INCREF(ro->element); - return ro->element; + return Py_NewRef(ro->element); } static PyObject * @@ -4782,8 +4749,7 @@ zip_longest_new(PyTypeObject *type, PyObject *args, PyObject *kwds) lz->tuplesize = tuplesize; lz->numactive = tuplesize; lz->result = result; - Py_INCREF(fillvalue); - lz->fillvalue = fillvalue; + lz->fillvalue = Py_NewRef(fillvalue); return (PyObject *)lz; } @@ -4825,8 +4791,7 @@ zip_longest_next(ziplongestobject *lz) for (i=0 ; i < tuplesize ; i++) { it = PyTuple_GET_ITEM(lz->ittuple, i); if (it == NULL) { - Py_INCREF(lz->fillvalue); - item = lz->fillvalue; + item = Py_NewRef(lz->fillvalue); } else { item = PyIter_Next(it); if (item == NULL) { @@ -4836,8 +4801,7 @@ zip_longest_next(ziplongestobject *lz) Py_DECREF(result); return NULL; } else { - Py_INCREF(lz->fillvalue); - item = lz->fillvalue; + item = Py_NewRef(lz->fillvalue); PyTuple_SET_ITEM(lz->ittuple, i, NULL); Py_DECREF(it); } @@ -4859,8 +4823,7 @@ zip_longest_next(ziplongestobject *lz) for (i=0 ; i < tuplesize ; i++) { it = PyTuple_GET_ITEM(lz->ittuple, i); if (it == NULL) { - Py_INCREF(lz->fillvalue); - item = lz->fillvalue; + item = Py_NewRef(lz->fillvalue); } else { item = PyIter_Next(it); if (item == NULL) { @@ -4870,8 +4833,7 @@ zip_longest_next(ziplongestobject *lz) Py_DECREF(result); return NULL; } else { - Py_INCREF(lz->fillvalue); - item = lz->fillvalue; + item = Py_NewRef(lz->fillvalue); PyTuple_SET_ITEM(lz->ittuple, i, NULL); Py_DECREF(it); } diff --git a/Modules/main.c b/Modules/main.c index aa523fc58d93fb..7edfeb3365b4c6 100644 --- a/Modules/main.c +++ b/Modules/main.c @@ -296,10 +296,10 @@ pymain_run_module(const wchar_t *modname, int set_argv0) Py_DECREF(module); return pymain_exit_err_print(); } - _Py_UnhandledKeyboardInterrupt = 0; + 
_PyRuntime.signals.unhandled_keyboard_interrupt = 0; result = PyObject_Call(runmodule, runargs, NULL); if (!result && PyErr_Occurred() == PyExc_KeyboardInterrupt) { - _Py_UnhandledKeyboardInterrupt = 1; + _PyRuntime.signals.unhandled_keyboard_interrupt = 1; } Py_DECREF(runpy); Py_DECREF(runmodule); @@ -696,7 +696,7 @@ Py_RunMain(void) pymain_free(); - if (_Py_UnhandledKeyboardInterrupt) { + if (_PyRuntime.signals.unhandled_keyboard_interrupt) { exitcode = exit_sigint(); } diff --git a/Modules/makesetup b/Modules/makesetup index 5c275ac9a04945..f000c9cd67310e 100755 --- a/Modules/makesetup +++ b/Modules/makesetup @@ -262,12 +262,15 @@ sed -e 's/[ ]*#.*//' -e '/^[ ]*$/d' | esac # custom flags first, PY_STDMODULE_CFLAGS may contain -I with system libmpdec case $doconfig in - no) cc="$cc $cpps \$(PY_STDMODULE_CFLAGS) \$(CCSHARED)";; + no) + cc="$cc $cpps \$(PY_STDMODULE_CFLAGS) \$(CCSHARED)" + rule="$obj: $src \$(MODULE_${mods_upper}_DEPS) \$(MODULE_DEPS_SHARED) \$(PYTHON_HEADERS); $cc -c $src -o $obj" + ;; *) - cc="$cc $cpps \$(PY_BUILTIN_MODULE_CFLAGS)";; + cc="$cc $cpps \$(PY_BUILTIN_MODULE_CFLAGS)" + rule="$obj: $src \$(MODULE_${mods_upper}_DEPS) \$(MODULE_DEPS_STATIC) \$(PYTHON_HEADERS); $cc -c $src -o $obj" + ;; esac - # force rebuild when header file or module build flavor (static/shared) is changed - rule="$obj: $src \$(MODULE_${mods_upper}_DEPS) \$(MODULE_DEPS); $cc -c $src -o $obj" echo "$rule" >>$rulesf done case $doconfig in diff --git a/Modules/mathmodule.c b/Modules/mathmodule.c index 46427876b8f4b5..49c0293d4f5ce3 100644 --- a/Modules/mathmodule.c +++ b/Modules/mathmodule.c @@ -2048,8 +2048,7 @@ factorial_odd_part(unsigned long n) inner = PyLong_FromLong(1); if (inner == NULL) return NULL; - outer = inner; - Py_INCREF(outer); + outer = Py_NewRef(inner); upper = 3; for (i = _Py_bit_length(n) - 2; i >= 0; i--) { @@ -2070,8 +2069,7 @@ factorial_odd_part(unsigned long n) Py_DECREF(partial); if (tmp == NULL) goto error; - Py_DECREF(inner); - inner = tmp; + Py_SETREF(inner, tmp); /* Now inner is the product of all odd integers j in the range (0, n/2**i], giving the inner product in the formula above. */ @@ -2079,8 +2077,7 @@ factorial_odd_part(unsigned long n) tmp = PyNumber_Multiply(outer, inner); if (tmp == NULL) goto error; - Py_DECREF(outer); - outer = tmp; + Py_SETREF(outer, tmp); } Py_DECREF(inner); return outer; @@ -3155,8 +3152,7 @@ math_prod_impl(PyObject *module, PyObject *iterable, PyObject *start) long i_result = PyLong_AsLongAndOverflow(result, &overflow); /* If this already overflowed, don't even enter the loop. 
*/ if (overflow == 0) { - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); } /* Loop over all the items in the iterable until we finish, we overflow * or we found a non integer element */ @@ -3203,8 +3199,7 @@ math_prod_impl(PyObject *module, PyObject *iterable, PyObject *start) */ if (PyFloat_CheckExact(result)) { double f_result = PyFloat_AS_DOUBLE(result); - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); while(result == NULL) { item = PyIter_Next(iter); if (item == NULL) { @@ -3253,8 +3248,7 @@ math_prod_impl(PyObject *module, PyObject *iterable, PyObject *start) if (item == NULL) { /* error, or end-of-sequence */ if (PyErr_Occurred()) { - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); } break; } @@ -3521,8 +3515,7 @@ perm_comb(PyObject *n, unsigned long long k, int iscomb) return PyLong_FromLong(1); } if (k == 1) { - Py_INCREF(n); - return n; + return Py_NewRef(n); } /* P(n, k) = P(n, j) * P(n-j, k-j) */ diff --git a/Modules/mmapmodule.c b/Modules/mmapmodule.c index fdce783fdec5e2..8244202376c74e 100644 --- a/Modules/mmapmodule.c +++ b/Modules/mmapmodule.c @@ -746,8 +746,7 @@ mmap__enter__method(mmap_object *self, PyObject *args) { CHECK_VALID(NULL); - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } static PyObject * @@ -805,12 +804,11 @@ mmap__repr__method(PyObject *self) static PyObject * mmap__sizeof__method(mmap_object *self, void *unused) { - Py_ssize_t res; - - res = _PyObject_SIZE(Py_TYPE(self)); - if (self->tagname) + size_t res = _PyObject_SIZE(Py_TYPE(self)); + if (self->tagname) { res += strlen(self->tagname) + 1; - return PyLong_FromSsize_t(res); + } + return PyLong_FromSize_t(res); } #endif diff --git a/Modules/nismodule.c b/Modules/nismodule.c index 39b991162b2761..ec7f6d8031e84b 100644 --- a/Modules/nismodule.c +++ b/Modules/nismodule.c @@ -458,8 +458,7 @@ nis_maps (PyObject *module, PyObject *args, PyObject *kwdict) if (!str || PyList_Append(list, str) < 0) { Py_XDECREF(str); - Py_DECREF(list); - list = NULL; + Py_SETREF(list, NULL); break; } Py_DECREF(str); diff --git a/Modules/ossaudiodev.c b/Modules/ossaudiodev.c index 0568b6dc44103a..2319eac449eb4f 100644 --- a/Modules/ossaudiodev.c +++ b/Modules/ossaudiodev.c @@ -20,7 +20,6 @@ #ifndef Py_BUILD_CORE_BUILTIN # define Py_BUILD_CORE_MODULE 1 #endif -#define NEEDS_PY_IDENTIFIER #define PY_SSIZE_T_CLEAN #include "Python.h" @@ -535,16 +534,13 @@ oss_close(oss_audio_t *self, PyObject *unused) static PyObject * oss_self(PyObject *self, PyObject *unused) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * oss_exit(PyObject *self, PyObject *unused) { - _Py_IDENTIFIER(close); - - PyObject *ret = _PyObject_CallMethodIdNoArgs(self, &PyId_close); + PyObject *ret = PyObject_CallMethod(self, "close", NULL); if (!ret) return NULL; Py_DECREF(ret); @@ -573,7 +569,7 @@ oss_setparameters(oss_audio_t *self, PyObject *args) if (!_is_fd_valid(self->fd)) return NULL; - if (!PyArg_ParseTuple(args, "iii|i:setparameters", + if (!PyArg_ParseTuple(args, "iii|p:setparameters", &wanted_fmt, &wanted_channels, &wanted_rate, &strict)) return NULL; @@ -1138,10 +1134,8 @@ PyInit_ossaudiodev(void) NULL, NULL); if (OSSAudioError) { /* Each call to PyModule_AddObject decrefs it; compensate: */ - Py_INCREF(OSSAudioError); - Py_INCREF(OSSAudioError); - PyModule_AddObject(m, "error", OSSAudioError); - PyModule_AddObject(m, "OSSAudioError", OSSAudioError); + PyModule_AddObject(m, "error", Py_NewRef(OSSAudioError)); + PyModule_AddObject(m, "OSSAudioError", 
Py_NewRef(OSSAudioError)); } /* Build 'control_labels' and 'control_names' lists and add them diff --git a/Modules/overlapped.c b/Modules/overlapped.c index deb772e9eff487..02c0f401be4c9e 100644 --- a/Modules/overlapped.c +++ b/Modules/overlapped.c @@ -912,8 +912,7 @@ _overlapped_Overlapped_getresult_impl(OverlappedObject *self, BOOL wait) _PyBytes_Resize(&self->allocated_buffer, transferred)) return NULL; - Py_INCREF(self->allocated_buffer); - return self->allocated_buffer; + return Py_NewRef(self->allocated_buffer); case TYPE_READ_FROM: assert(PyBytes_CheckExact(self->read_from.allocated_buffer)); @@ -940,14 +939,12 @@ _overlapped_Overlapped_getresult_impl(OverlappedObject *self, BOOL wait) } // first item: message - Py_INCREF(self->read_from.allocated_buffer); PyTuple_SET_ITEM(self->read_from.result, 0, - self->read_from.allocated_buffer); + Py_NewRef(self->read_from.allocated_buffer)); // second item: address PyTuple_SET_ITEM(self->read_from.result, 1, addr); - Py_INCREF(self->read_from.result); - return self->read_from.result; + return Py_NewRef(self->read_from.result); case TYPE_READ_FROM_INTO: // unparse the address addr = unparse_address((SOCKADDR*)&self->read_from_into.address, @@ -970,8 +967,7 @@ _overlapped_Overlapped_getresult_impl(OverlappedObject *self, BOOL wait) // second item: address PyTuple_SET_ITEM(self->read_from_into.result, 1, addr); - Py_INCREF(self->read_from_into.result); - return self->read_from_into.result; + return Py_NewRef(self->read_from_into.result); default: return PyLong_FromUnsignedLong((unsigned long) transferred); } diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index d863f9f63248f5..4817973262f484 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -16,6 +16,9 @@ #ifdef MS_WINDOWS # include <windows.h> # include <pathcch.h> +# include <lmcons.h> // UNLEN +# include "osdefs.h" // SEP +# define HAVE_SYMLINK #endif #ifdef __VXWORKS__ @@ -154,6 +157,18 @@ # define HAVE_SYMLINKAT_RUNTIME (symlinkat != NULL) # endif +# ifdef HAVE_UTIMENSAT +# define HAVE_UTIMENSAT_RUNTIME (utimensat != NULL) +# endif + +# ifdef HAVE_FUTIMENS +# define HAVE_FUTIMENS_RUNTIME (futimens != NULL) +# endif + +# ifdef HAVE_PWRITEV +# define HAVE_PWRITEV_RUNTIME (pwritev != NULL) +# endif + #endif #ifdef HAVE_FUTIMESAT @@ -414,18 +429,7 @@ extern char *ctermid_r(char *); # ifdef HAVE_PROCESS_H # include <process.h> # endif -# ifndef IO_REPARSE_TAG_SYMLINK -# define IO_REPARSE_TAG_SYMLINK (0xA000000CL) -# endif -# ifndef IO_REPARSE_TAG_MOUNT_POINT -# define IO_REPARSE_TAG_MOUNT_POINT (0xA0000003L) -# endif -# include "osdefs.h" // SEP # include <malloc.h> -# include <windows.h> -# include <shellapi.h> // ShellExecute() -# include <lmcons.h> // UNLEN -# define HAVE_SYMLINK #endif /* _MSC_VER */ #ifndef MAXPATHLEN @@ -491,9 +495,11 @@ extern char *ctermid_r(char *); #ifdef MS_WINDOWS # define INITFUNC PyInit_nt # define MODNAME "nt" +# define MODNAME_OBJ &_Py_ID(nt) #else # define INITFUNC PyInit_posix # define MODNAME "posix" +# define MODNAME_OBJ &_Py_ID(posix) #endif #if defined(__sun) @@ -970,6 +976,7 @@ typedef struct { #if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDPARAM) PyObject *SchedParamType; #endif + newfunc statresult_new_orig; PyObject *StatResultType; PyObject *StatVFSResultType; PyObject *TerminalSizeType; @@ -1190,8 +1197,7 @@ path_converter(PyObject *o, void *p) } /* still owns a reference to the original object */ - Py_DECREF(o); - o = res; + 
Py_SETREF(o, res); } if (is_unicode) { @@ -1221,8 +1227,7 @@ path_converter(PyObject *o, void *p) #endif } else if (is_bytes) { - bytes = o; - Py_INCREF(bytes); + bytes = Py_NewRef(o); } else if (is_index) { if (!_fd_converter(o, &path->fd)) { @@ -2223,7 +2228,6 @@ static PyStructSequence_Desc waitid_result_desc = { 5 }; #endif -static newfunc structseq_new; static PyObject * statresult_new(PyTypeObject *type, PyObject *args, PyObject *kwds) @@ -2231,6 +2235,19 @@ statresult_new(PyTypeObject *type, PyObject *args, PyObject *kwds) PyStructSequence *result; int i; + // ht_module doesn't get set in PyStructSequence_NewType(), + // so we can't use PyType_GetModule(). + PyObject *mod = PyImport_GetModule(MODNAME_OBJ); + if (mod == NULL) { + return NULL; + } + _posixstate *state = get_posix_state(mod); + Py_DECREF(mod); + if (state == NULL) { + return NULL; + } +#define structseq_new state->statresult_new_orig + result = (PyStructSequence*)structseq_new(type, args, kwds); if (!result) return NULL; @@ -2240,8 +2257,7 @@ statresult_new(PyTypeObject *type, PyObject *args, PyObject *kwds) for (i = 7; i <= 9; i++) { if (result->ob_item[i+3] == Py_None) { Py_DECREF(Py_None); - Py_INCREF(result->ob_item[i]); - result->ob_item[i+3] = result->ob_item[i]; + result->ob_item[i+3] = Py_NewRef(result->ob_item[i]); } } return (PyObject*)result; @@ -4027,14 +4043,12 @@ _listdir_windows_no_opendir(path_t *path, PyObject *list) Py_SETREF(v, PyUnicode_EncodeFSDefault(v)); } if (v == NULL) { - Py_DECREF(list); - list = NULL; + Py_SETREF(list, NULL); break; } if (PyList_Append(list, v) != 0) { Py_DECREF(v); - Py_DECREF(list); - list = NULL; + Py_SETREF(list, NULL); break; } Py_DECREF(v); @@ -6325,9 +6339,9 @@ os.posix_spawn A sequence of file action tuples. setpgroup: object = NULL The pgroup to use with the POSIX_SPAWN_SETPGROUP flag. - resetids: bool(accept={int}) = False + resetids: bool = False If the value is `true` the POSIX_SPAWN_RESETIDS will be activated. - setsid: bool(accept={int}) = False + setsid: bool = False If the value is `true` the POSIX_SPAWN_SETSID or POSIX_SPAWN_SETSID_NP will be activated. setsigmask: object(c_default='NULL') = () The sigmask to use with the POSIX_SPAWN_SETSIGMASK flag. @@ -6345,7 +6359,7 @@ os_posix_spawn_impl(PyObject *module, path_t *path, PyObject *argv, PyObject *setpgroup, int resetids, int setsid, PyObject *setsigmask, PyObject *setsigdef, PyObject *scheduler) -/*[clinic end generated code: output=14a1098c566bc675 input=8c6305619a00ad04]*/ +/*[clinic end generated code: output=14a1098c566bc675 input=808aed1090d84e33]*/ { return py_posix_spawn(0, module, path, argv, env, file_actions, setpgroup, resetids, setsid, setsigmask, setsigdef, @@ -6371,9 +6385,9 @@ os.posix_spawnp A sequence of file action tuples. setpgroup: object = NULL The pgroup to use with the POSIX_SPAWN_SETPGROUP flag. - resetids: bool(accept={int}) = False + resetids: bool = False If the value is `True` the POSIX_SPAWN_RESETIDS will be activated. - setsid: bool(accept={int}) = False + setsid: bool = False If the value is `True` the POSIX_SPAWN_SETSID or POSIX_SPAWN_SETSID_NP will be activated. setsigmask: object(c_default='NULL') = () The sigmask to use with the POSIX_SPAWN_SETSIGMASK flag. 
@@ -6391,7 +6405,7 @@ os_posix_spawnp_impl(PyObject *module, path_t *path, PyObject *argv, PyObject *setpgroup, int resetids, int setsid, PyObject *setsigmask, PyObject *setsigdef, PyObject *scheduler) -/*[clinic end generated code: output=7b9aaefe3031238d input=c1911043a22028da]*/ +/*[clinic end generated code: output=7b9aaefe3031238d input=9e89e616116752a1]*/ { return py_posix_spawn(1, module, path, argv, env, file_actions, setpgroup, resetids, setsid, setsigmask, setsigdef, @@ -6888,8 +6902,7 @@ os_sched_param_impl(PyTypeObject *type, PyObject *sched_priority) res = PyStructSequence_New(type); if (!res) return NULL; - Py_INCREF(sched_priority); - PyStructSequence_SET_ITEM(res, 0, sched_priority); + PyStructSequence_SET_ITEM(res, 0, Py_NewRef(sched_priority)); return res; } @@ -8017,8 +8030,7 @@ os_kill_impl(PyObject *module, pid_t pid, Py_ssize_t signal) err = GetLastError(); result = PyErr_SetFromWindowsErr(err); } else { - Py_INCREF(Py_None); - result = Py_None; + result = Py_NewRef(Py_None); } CloseHandle(handle); @@ -8647,7 +8659,7 @@ os_unshare_impl(PyObject *module, int flags) if (res != 0) { return posix_error(); } - + Py_RETURN_NONE; } #endif @@ -9054,11 +9066,6 @@ build_times_result(PyObject *module, double user, double system, } -#ifndef MS_WINDOWS -#define NEED_TICKS_PER_SECOND -static long ticks_per_second = -1; -#endif /* MS_WINDOWS */ - /*[clinic input] os.times @@ -9094,20 +9101,22 @@ os_times_impl(PyObject *module) } #else /* MS_WINDOWS */ { - - struct tms t; clock_t c; errno = 0; c = times(&t); - if (c == (clock_t) -1) + if (c == (clock_t) -1) { return posix_error(); + } + assert(_PyRuntime.time.ticks_per_second_initialized); +#define ticks_per_second _PyRuntime.time.ticks_per_second return build_times_result(module, (double)t.tms_utime / ticks_per_second, (double)t.tms_stime / ticks_per_second, (double)t.tms_cutime / ticks_per_second, (double)t.tms_cstime / ticks_per_second, (double)c / ticks_per_second); +#undef ticks_per_second } #endif /* MS_WINDOWS */ #endif /* HAVE_TIMES */ @@ -9842,7 +9851,7 @@ os_preadv_impl(PyObject *module, int fd, PyObject *buffers, Py_off_t offset, } while (n < 0 && errno == EINTR && !(async_err = PyErr_CheckSignals())); #else do { -#ifdef __APPLE__ +#if defined(__APPLE__) && defined(__clang__) /* This entire function will be removed from the module dict when the API * is not available. */ @@ -9857,7 +9866,7 @@ os_preadv_impl(PyObject *module, int fd, PyObject *buffers, Py_off_t offset, Py_END_ALLOW_THREADS } while (n < 0 && errno == EINTR && !(async_err = PyErr_CheckSignals())); -#ifdef __APPLE__ +#if defined(__APPLE__) && defined(__clang__) #pragma clang diagnostic pop #endif @@ -10484,7 +10493,7 @@ os_pwritev_impl(PyObject *module, int fd, PyObject *buffers, Py_off_t offset, } while (result < 0 && errno == EINTR && !(async_err = PyErr_CheckSignals())); #else -#ifdef __APPLE__ +#if defined(__APPLE__) && defined(__clang__) /* This entire function will be removed from the module dict when the API * is not available. 
*/ @@ -10500,7 +10509,7 @@ os_pwritev_impl(PyObject *module, int fd, PyObject *buffers, Py_off_t offset, Py_END_ALLOW_THREADS } while (result < 0 && errno == EINTR && !(async_err = PyErr_CheckSignals())); -#ifdef __APPLE__ +#if defined(__APPLE__) && defined(__clang__) #pragma clang diagnostic pop #endif @@ -13124,15 +13133,13 @@ os_listxattr_impl(PyObject *module, path_t *path, int follow_symlinks) PyObject *attribute = PyUnicode_DecodeFSDefaultAndSize(start, trace - start); if (!attribute) { - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); goto exit; } error = PyList_Append(result, attribute); Py_DECREF(attribute); if (error) { - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); goto exit; } start = trace + 1; @@ -13533,7 +13540,7 @@ os_get_blocking_impl(PyObject *module, int fd) /*[clinic input] os.set_blocking fd: int - blocking: bool(accept={int}) + blocking: bool / Set the blocking mode of the specified file descriptor. @@ -13544,7 +13551,7 @@ clear the O_NONBLOCK flag otherwise. static PyObject * os_set_blocking_impl(PyObject *module, int fd, int blocking) -/*[clinic end generated code: output=384eb43aa0762a9d input=bf5c8efdc5860ff3]*/ +/*[clinic end generated code: output=384eb43aa0762a9d input=7e9dfc9b14804dd4]*/ { int result; @@ -13626,6 +13633,25 @@ os_DirEntry_is_symlink_impl(DirEntry *self, PyTypeObject *defining_class) #endif } +/*[clinic input] +os.DirEntry.is_junction -> bool + defining_class: defining_class + / + +Return True if the entry is a junction; cached per entry. +[clinic start generated code]*/ + +static int +os_DirEntry_is_junction_impl(DirEntry *self, PyTypeObject *defining_class) +/*[clinic end generated code: output=7061a07b0ef2cd1f input=475cd36fb7d4723f]*/ +{ +#ifdef MS_WINDOWS + return self->win32_lstat.st_reparse_tag == IO_REPARSE_TAG_MOUNT_POINT; +#else + return 0; +#endif +} + static PyObject * DirEntry_fetch_stat(PyObject *module, DirEntry *self, int follow_symlinks) { @@ -13693,8 +13719,7 @@ DirEntry_get_lstat(PyTypeObject *defining_class, DirEntry *self) self->lstat = DirEntry_fetch_stat(module, self, 0); #endif } - Py_XINCREF(self->lstat); - return self->lstat; + return Py_XNewRef(self->lstat); } /*[clinic input] @@ -13730,8 +13755,7 @@ os_DirEntry_stat_impl(DirEntry *self, PyTypeObject *defining_class, } } - Py_XINCREF(self->stat); - return self->stat; + return Py_XNewRef(self->stat); } /* Set exception and return -1 on error, 0 for False, 1 for True */ @@ -13905,8 +13929,7 @@ static PyObject * os_DirEntry___fspath___impl(DirEntry *self) /*[clinic end generated code: output=6dd7f7ef752e6f4f input=3c49d0cf38df4fac]*/ { - Py_INCREF(self->path); - return self->path; + return Py_NewRef(self->path); } static PyMemberDef DirEntry_members[] = { @@ -13923,6 +13946,7 @@ static PyMethodDef DirEntry_methods[] = { OS_DIRENTRY_IS_DIR_METHODDEF OS_DIRENTRY_IS_FILE_METHODDEF OS_DIRENTRY_IS_SYMLINK_METHODDEF + OS_DIRENTRY_IS_JUNCTION_METHODDEF OS_DIRENTRY_STAT_METHODDEF OS_DIRENTRY_INODE_METHODDEF OS_DIRENTRY___FSPATH___METHODDEF @@ -14114,8 +14138,7 @@ DirEntry_from_posix_info(PyObject *module, path_t *path, const char *name, goto error; if (path->fd != -1) { - entry->path = entry->name; - Py_INCREF(entry->path); + entry->path = Py_NewRef(entry->name); } else if (!entry->path) goto error; @@ -14306,8 +14329,7 @@ ScandirIterator_close(ScandirIterator *self, PyObject *args) static PyObject * ScandirIterator_enter(PyObject *self, PyObject *args) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * @@ -14515,8 
+14537,7 @@ PyOS_FSPath(PyObject *path) PyObject *path_repr = NULL; if (PyUnicode_Check(path) || PyBytes_Check(path)) { - Py_INCREF(path); - return path; + return Py_NewRef(path); } func = _PyObject_LookupSpecial(path, &_Py_ID(__fspath__)); @@ -15881,8 +15902,7 @@ posixmodule_exec(PyObject *m) if (setup_confname_tables(m)) return -1; - Py_INCREF(PyExc_OSError); - PyModule_AddObject(m, "error", PyExc_OSError); + PyModule_AddObject(m, "error", Py_NewRef(PyExc_OSError)); #if defined(HAVE_WAITID) && !defined(__APPLE__) waitid_result_desc.name = MODNAME ".waitid_result"; @@ -15890,8 +15910,7 @@ posixmodule_exec(PyObject *m) if (WaitidResultType == NULL) { return -1; } - Py_INCREF(WaitidResultType); - PyModule_AddObject(m, "waitid_result", WaitidResultType); + PyModule_AddObject(m, "waitid_result", Py_NewRef(WaitidResultType)); state->WaitidResultType = WaitidResultType; #endif @@ -15903,10 +15922,9 @@ posixmodule_exec(PyObject *m) if (StatResultType == NULL) { return -1; } - Py_INCREF(StatResultType); - PyModule_AddObject(m, "stat_result", StatResultType); + PyModule_AddObject(m, "stat_result", Py_NewRef(StatResultType)); state->StatResultType = StatResultType; - structseq_new = ((PyTypeObject *)StatResultType)->tp_new; + state->statresult_new_orig = ((PyTypeObject *)StatResultType)->tp_new; ((PyTypeObject *)StatResultType)->tp_new = statresult_new; statvfs_result_desc.name = "os.statvfs_result"; /* see issue #19209 */ @@ -15914,18 +15932,8 @@ posixmodule_exec(PyObject *m) if (StatVFSResultType == NULL) { return -1; } - Py_INCREF(StatVFSResultType); - PyModule_AddObject(m, "statvfs_result", StatVFSResultType); + PyModule_AddObject(m, "statvfs_result", Py_NewRef(StatVFSResultType)); state->StatVFSResultType = StatVFSResultType; -#ifdef NEED_TICKS_PER_SECOND -# if defined(HAVE_SYSCONF) && defined(_SC_CLK_TCK) - ticks_per_second = sysconf(_SC_CLK_TCK); -# elif defined(HZ) - ticks_per_second = HZ; -# else - ticks_per_second = 60; /* magic fallback value; may be bogus */ -# endif -#endif #if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDPARAM) sched_param_desc.name = MODNAME ".sched_param"; @@ -15933,8 +15941,7 @@ posixmodule_exec(PyObject *m) if (SchedParamType == NULL) { return -1; } - Py_INCREF(SchedParamType); - PyModule_AddObject(m, "sched_param", SchedParamType); + PyModule_AddObject(m, "sched_param", Py_NewRef(SchedParamType)); state->SchedParamType = SchedParamType; ((PyTypeObject *)SchedParamType)->tp_new = os_sched_param; #endif @@ -15944,8 +15951,7 @@ posixmodule_exec(PyObject *m) if (TerminalSizeType == NULL) { return -1; } - Py_INCREF(TerminalSizeType); - PyModule_AddObject(m, "terminal_size", TerminalSizeType); + PyModule_AddObject(m, "terminal_size", Py_NewRef(TerminalSizeType)); state->TerminalSizeType = TerminalSizeType; /* initialize scandir types */ @@ -15959,8 +15965,7 @@ posixmodule_exec(PyObject *m) if (DirEntryType == NULL) { return -1; } - Py_INCREF(DirEntryType); - PyModule_AddObject(m, "DirEntry", DirEntryType); + PyModule_AddObject(m, "DirEntry", Py_NewRef(DirEntryType)); state->DirEntryType = DirEntryType; times_result_desc.name = MODNAME ".times_result"; @@ -15968,16 +15973,15 @@ posixmodule_exec(PyObject *m) if (TimesResultType == NULL) { return -1; } - Py_INCREF(TimesResultType); - PyModule_AddObject(m, "times_result", TimesResultType); + PyModule_AddObject(m, "times_result", Py_NewRef(TimesResultType)); state->TimesResultType = TimesResultType; PyTypeObject *UnameResultType = 
PyStructSequence_NewType(&uname_result_desc); if (UnameResultType == NULL) { return -1; } - Py_INCREF(UnameResultType); - PyModule_AddObject(m, "uname_result", (PyObject *)UnameResultType); + ; + PyModule_AddObject(m, "uname_result", Py_NewRef(UnameResultType)); state->UnameResultType = (PyObject *)UnameResultType; if ((state->billion = PyLong_FromLong(1000000000)) == NULL) diff --git a/Modules/pyexpat.c b/Modules/pyexpat.c index 165cb0effae2e4..2440798bff7e66 100644 --- a/Modules/pyexpat.c +++ b/Modules/pyexpat.c @@ -710,7 +710,7 @@ pyexpat.xmlparser.Parse cls: defining_class data: object - isfinal: bool(accept={int}) = False + isfinal: bool = False / Parse XML data. @@ -721,7 +721,7 @@ Parse XML data. static PyObject * pyexpat_xmlparser_Parse_impl(xmlparseobject *self, PyTypeObject *cls, PyObject *data, int isfinal) -/*[clinic end generated code: output=8faffe07fe1f862a input=fc97f833558ca715]*/ +/*[clinic end generated code: output=8faffe07fe1f862a input=d0eb2a69fab3b9f1]*/ { const char *s; Py_ssize_t slen; @@ -959,8 +959,7 @@ pyexpat_xmlparser_ExternalEntityParserCreate_impl(xmlparseobject *self, new_parser->itself = XML_ExternalEntityParserCreate(self->itself, context, encoding); new_parser->handlers = 0; - new_parser->intern = self->intern; - Py_XINCREF(new_parser->intern); + new_parser->intern = Py_XNewRef(self->intern); if (self->buffer != NULL) { new_parser->buffer = PyMem_Malloc(new_parser->buffer_size); @@ -991,8 +990,7 @@ pyexpat_xmlparser_ExternalEntityParserCreate_impl(xmlparseobject *self, for (i = 0; handler_info[i].name != NULL; i++) { PyObject *handler = self->handlers[i]; if (handler != NULL) { - Py_INCREF(handler); - new_parser->handlers[i] = handler; + new_parser->handlers[i] = Py_NewRef(handler); handler_info[i].setter(new_parser->itself, handler_info[i].handler); } @@ -1148,8 +1146,7 @@ newxmlparseobject(pyexpat_state *state, const char *encoding, self->in_callback = 0; self->ns_prefixes = 0; self->handlers = NULL; - self->intern = intern; - Py_XINCREF(self->intern); + self->intern = Py_XNewRef(intern); /* namespace_separator is either NULL or contains one char + \0 */ self->itself = XML_ParserCreate_MM(encoding, &ExpatMemoryHandler, @@ -1232,8 +1229,7 @@ xmlparse_handler_getter(xmlparseobject *self, struct HandlerInfo *hi) PyObject *result = self->handlers[handlernum]; if (result == NULL) result = Py_None; - Py_INCREF(result); - return result; + return Py_NewRef(result); } static int @@ -1365,9 +1361,7 @@ xmlparse_buffer_size_setter(xmlparseobject *self, PyObject *v, void *closure) /* check maximum */ if (new_buffer_size > INT_MAX) { - char errmsg[100]; - sprintf(errmsg, "buffer_size must not be greater than %i", INT_MAX); - PyErr_SetString(PyExc_ValueError, errmsg); + PyErr_Format(PyExc_ValueError, "buffer_size must not be greater than %i", INT_MAX); return -1; } @@ -1725,7 +1719,7 @@ add_error(PyObject *errors_module, PyObject *codes_dict, const int error_code = (int)error_index; /* NOTE: This keeps the source of truth regarding error - * messages with libexpat and (by definiton) in bulletproof sync + * messages with libexpat and (by definition) in bulletproof sync * with the other uses of the XML_ErrorString function * elsewhere within this file. 
pyexpat's copy of the messages * only acts as a fallback in case of outdated runtime libexpat, @@ -1796,15 +1790,13 @@ add_errors_module(PyObject *mod) goto error; } - Py_INCREF(codes_dict); - if (PyModule_AddObject(errors_module, "codes", codes_dict) < 0) { + if (PyModule_AddObject(errors_module, "codes", Py_NewRef(codes_dict)) < 0) { Py_DECREF(codes_dict); goto error; } Py_CLEAR(codes_dict); - Py_INCREF(rev_codes_dict); - if (PyModule_AddObject(errors_module, "messages", rev_codes_dict) < 0) { + if (PyModule_AddObject(errors_module, "messages", Py_NewRef(rev_codes_dict)) < 0) { Py_DECREF(rev_codes_dict); goto error; } diff --git a/Modules/readline.c b/Modules/readline.c index 27b89de7279464..fdb6356e1c84b5 100644 --- a/Modules/readline.c +++ b/Modules/readline.c @@ -402,8 +402,7 @@ set_hook(const char *funcname, PyObject **hook_var, PyObject *function) Py_CLEAR(*hook_var); } else if (PyCallable_Check(function)) { - Py_INCREF(function); - Py_XSETREF(*hook_var, function); + Py_XSETREF(*hook_var, Py_NewRef(function)); } else { PyErr_Format(PyExc_TypeError, @@ -524,8 +523,7 @@ static PyObject * readline_get_begidx_impl(PyObject *module) /*[clinic end generated code: output=362616ee8ed1b2b1 input=e083b81c8eb4bac3]*/ { - Py_INCREF(readlinestate_global->begidx); - return readlinestate_global->begidx; + return Py_NewRef(readlinestate_global->begidx); } /* Get the ending index for the scope of the tab-completion */ @@ -540,8 +538,7 @@ static PyObject * readline_get_endidx_impl(PyObject *module) /*[clinic end generated code: output=7f763350b12d7517 input=d4c7e34a625fd770]*/ { - Py_INCREF(readlinestate_global->endidx); - return readlinestate_global->endidx; + return Py_NewRef(readlinestate_global->endidx); } /* Set the tab-completion word-delimiters that readline uses */ @@ -784,8 +781,7 @@ readline_get_completer_impl(PyObject *module) if (readlinestate_global->completer == NULL) { Py_RETURN_NONE; } - Py_INCREF(readlinestate_global->completer); - return readlinestate_global->completer; + return Py_NewRef(readlinestate_global->completer); } /* Private function to get current length of history. XXX It may be diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c index 4eea928a2683ae..df4043de08dacc 100644 --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -1652,8 +1652,7 @@ select_epoll___enter___impl(pyEpoll_Object *self) if (self->epfd < 0) return pyepoll_err_closed(); - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } /*[clinic input] diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c index bdd3f4b2204ef4..538a7e85bc950c 100644 --- a/Modules/signalmodule.c +++ b/Modules/signalmodule.c @@ -13,7 +13,7 @@ #include "pycore_moduleobject.h" // _PyModule_GetState() #include "pycore_pyerrors.h" // _PyErr_SetString() #include "pycore_pystate.h" // _PyThreadState_GET() -#include "pycore_signal.h" // Py_NSIG +#include "pycore_signal.h" #ifndef MS_WINDOWS # include "posixmodule.h" @@ -23,12 +23,13 @@ #endif #ifdef MS_WINDOWS -# include <windows.h> # ifdef HAVE_PROCESS_H # include <process.h> # endif #endif +#include "pycore_signal.h" // Py_NSIG + #ifdef HAVE_SIGNAL_H # include <signal.h> #endif @@ -100,47 +101,13 @@ class sigset_t_converter(CConverter): may not be the thread that received the signal. */ -static volatile struct { - _Py_atomic_int tripped; - /* func is atomic to ensure that PyErr_SetInterrupt is async-signal-safe - * (even though it would probably be otherwise, anyway). 
- */ - _Py_atomic_address func; -} Handlers[Py_NSIG]; - -#ifdef MS_WINDOWS -#define INVALID_FD ((SOCKET_T)-1) - -static volatile struct { - SOCKET_T fd; - int warn_on_full_buffer; - int use_send; -} wakeup = {.fd = INVALID_FD, .warn_on_full_buffer = 1, .use_send = 0}; -#else -#define INVALID_FD (-1) -static volatile struct { -#ifdef __VXWORKS__ - int fd; -#else - sig_atomic_t fd; -#endif - int warn_on_full_buffer; -} wakeup = {.fd = INVALID_FD, .warn_on_full_buffer = 1}; -#endif - -/* Speed up sigcheck() when none tripped */ -static _Py_atomic_int is_tripped; - -typedef struct { - PyObject *default_handler; - PyObject *ignore_handler; -#ifdef MS_WINDOWS - HANDLE sigint_event; -#endif -} signal_state_t; +#define Handlers _PyRuntime.signals.handlers +#define wakeup _PyRuntime.signals.wakeup +#define is_tripped _PyRuntime.signals.is_tripped // State shared by all Python interpreters -static signal_state_t signal_global_state = {0}; +typedef struct _signals_runtime_state signal_state_t; +#define signal_global_state _PyRuntime.signals #if defined(HAVE_GETITIMER) || defined(HAVE_SETITIMER) # define PYHAVE_ITIMER_ERROR @@ -331,13 +298,7 @@ trip_signal(int sig_num) See bpo-30038 for more details. */ - int fd; -#ifdef MS_WINDOWS - fd = Py_SAFE_DOWNCAST(wakeup.fd, SOCKET_T, int); -#else - fd = wakeup.fd; -#endif - + int fd = wakeup.fd; if (fd != INVALID_FD) { unsigned char byte = (unsigned char)sig_num; #ifdef MS_WINDOWS @@ -407,7 +368,7 @@ signal_handler(int sig_num) #ifdef MS_WINDOWS if (sig_num == SIGINT) { signal_state_t *state = &signal_global_state; - SetEvent(state->sigint_event); + SetEvent((HANDLE)state->sigint_event); } #endif } @@ -626,13 +587,14 @@ signal.strsignal Return the system description of the given signal. -The return values can be such as "Interrupt", "Segmentation fault", etc. -Returns None if the signal is not recognized. +Returns the description of signal *signalnum*, such as "Interrupt" +for :const:`SIGINT`. Returns :const:`None` if *signalnum* has no +description. Raises :exc:`ValueError` if *signalnum* is invalid. [clinic start generated code]*/ static PyObject * signal_strsignal_impl(PyObject *module, int signalnum) -/*[clinic end generated code: output=44e12e1e3b666261 input=b77914b03f856c74]*/ +/*[clinic end generated code: output=44e12e1e3b666261 input=238b335847778bc0]*/ { const char *res; @@ -821,7 +783,7 @@ signal_set_wakeup_fd(PyObject *self, PyObject *args, PyObject *kwds) } old_sockfd = wakeup.fd; - wakeup.fd = sockfd; + wakeup.fd = Py_SAFE_DOWNCAST(sockfd, SOCKET_T, int); wakeup.warn_on_full_buffer = warn_on_full_buffer; wakeup.use_send = is_socket; @@ -872,11 +834,7 @@ PySignal_SetWakeupFd(int fd) fd = -1; } -#ifdef MS_WINDOWS - int old_fd = Py_SAFE_DOWNCAST(wakeup.fd, SOCKET_T, int); -#else int old_fd = wakeup.fd; -#endif wakeup.fd = fd; wakeup.warn_on_full_buffer = 1; return old_fd; @@ -1653,6 +1611,8 @@ signal_module_exec(PyObject *m) signal_state_t *state = &signal_global_state; _signal_module_state *modstate = get_signal_state(m); + // XXX For proper isolation, these values must be guaranteed + // to be effectively const (e.g. immortal). 
modstate->default_handler = state->default_handler; // borrowed ref modstate->ignore_handler = state->ignore_handler; // borrowed ref @@ -1782,7 +1742,7 @@ _PySignal_Fini(void) #ifdef MS_WINDOWS if (state->sigint_event != NULL) { - CloseHandle(state->sigint_event); + CloseHandle((HANDLE)state->sigint_event); state->sigint_event = NULL; } #endif @@ -2008,7 +1968,7 @@ _PySignal_Init(int install_signal_handlers) #ifdef MS_WINDOWS /* Create manual-reset event, initially unset */ - state->sigint_event = CreateEvent(NULL, TRUE, FALSE, FALSE); + state->sigint_event = (void *)CreateEvent(NULL, TRUE, FALSE, FALSE); if (state->sigint_event == NULL) { PyErr_SetFromWindowsErr(0); return -1; diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c index 869bacde924d83..2c59c2f2c89b25 100644 --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -247,6 +247,10 @@ shutdown(how) -- shut down traffic in one or both directions\n\ #include <net/if.h> #endif +#ifdef HAVE_NET_ETHERNET_H +#include <net/ethernet.h> +#endif + /* Generic socket object definitions and includes */ #define PySocket_BUILDING_SOCKET #include "socketmodule.h" @@ -2924,8 +2928,8 @@ sock_setblocking(PySocketSockObject *s, PyObject *arg) { long block; - block = PyLong_AsLong(arg); - if (block == -1 && PyErr_Occurred()) + block = PyObject_IsTrue(arg); + if (block < 0) return NULL; s->sock_timeout = _PyTime_FromSeconds(block ? -1 : 0); @@ -4106,8 +4110,7 @@ makeval_recvmsg(ssize_t received, void *data) if (received < PyBytes_GET_SIZE(*buf)) _PyBytes_Resize(buf, received); - Py_XINCREF(*buf); - return *buf; + return Py_XNewRef(*buf); } /* s.recvmsg(bufsize[, ancbufsize[, flags]]) method */ @@ -4386,8 +4389,7 @@ sock_sendall(PySocketSockObject *s, PyObject *args) } while (len > 0); PyBuffer_Release(&pbuf); - Py_INCREF(Py_None); - res = Py_None; + res = Py_NewRef(Py_None); done: PyBuffer_Release(&pbuf); @@ -7342,29 +7344,22 @@ PyInit__socket(void) if (m == NULL) return NULL; - Py_INCREF(PyExc_OSError); - PyModule_AddObject(m, "error", PyExc_OSError); + PyModule_AddObject(m, "error", Py_NewRef(PyExc_OSError)); socket_herror = PyErr_NewException("socket.herror", PyExc_OSError, NULL); if (socket_herror == NULL) return NULL; - Py_INCREF(socket_herror); - PyModule_AddObject(m, "herror", socket_herror); + PyModule_AddObject(m, "herror", Py_NewRef(socket_herror)); socket_gaierror = PyErr_NewException("socket.gaierror", PyExc_OSError, NULL); if (socket_gaierror == NULL) return NULL; - Py_INCREF(socket_gaierror); - PyModule_AddObject(m, "gaierror", socket_gaierror); + PyModule_AddObject(m, "gaierror", Py_NewRef(socket_gaierror)); PyModule_AddObjectRef(m, "timeout", PyExc_TimeoutError); - Py_INCREF((PyObject *)&sock_type); - if (PyModule_AddObject(m, "SocketType", - (PyObject *)&sock_type) != 0) + if (PyModule_AddObject(m, "SocketType", Py_NewRef(&sock_type)) != 0) return NULL; - Py_INCREF((PyObject *)&sock_type); - if (PyModule_AddObject(m, "socket", - (PyObject *)&sock_type) != 0) + if (PyModule_AddObject(m, "socket", Py_NewRef(&sock_type)) != 0) return NULL; #ifdef ENABLE_IPV6 @@ -7372,8 +7367,7 @@ PyInit__socket(void) #else has_ipv6 = Py_False; #endif - Py_INCREF(has_ipv6); - PyModule_AddObject(m, "has_ipv6", has_ipv6); + PyModule_AddObject(m, "has_ipv6", Py_NewRef(has_ipv6)); /* Export C API */ PySocketModule_APIObject *capi = sock_get_api(); @@ -7711,6 +7705,25 @@ PyInit__socket(void) PyModule_AddIntMacro(m, ALG_OP_VERIFY); #endif +/* IEEE 802.3 protocol numbers required for a standard TCP/IP network stack */ +#ifdef ETHERTYPE_ARP + 
PyModule_AddIntMacro(m, ETHERTYPE_ARP); +#endif +#ifdef ETHERTYPE_IP + PyModule_AddIntMacro(m, ETHERTYPE_IP); +#endif +#ifdef ETHERTYPE_IPV6 + PyModule_AddIntMacro(m, ETHERTYPE_IPV6); +#endif +#ifdef ETHERTYPE_VLAN + PyModule_AddIntMacro(m, ETHERTYPE_VLAN); +#endif + +/* Linux pseudo-protocol for sniffing every packet */ +#ifdef ETH_P_ALL + PyModule_AddIntMacro(m, ETH_P_ALL); +#endif + /* Socket types */ PyModule_AddIntMacro(m, SOCK_STREAM); PyModule_AddIntMacro(m, SOCK_DGRAM); @@ -8471,18 +8484,78 @@ PyInit__socket(void) #ifdef TCP_QUICKACK PyModule_AddIntMacro(m, TCP_QUICKACK); #endif -#ifdef TCP_FASTOPEN - PyModule_AddIntMacro(m, TCP_FASTOPEN); -#endif #ifdef TCP_CONGESTION PyModule_AddIntMacro(m, TCP_CONGESTION); #endif +#ifdef TCP_MD5SIG + PyModule_AddIntMacro(m, TCP_MD5SIG); +#endif +#ifdef TCP_THIN_LINEAR_TIMEOUTS + PyModule_AddIntMacro(m, TCP_THIN_LINEAR_TIMEOUTS); +#endif +#ifdef TCP_THIN_DUPACK + PyModule_AddIntMacro(m, TCP_THIN_DUPACK); +#endif #ifdef TCP_USER_TIMEOUT PyModule_AddIntMacro(m, TCP_USER_TIMEOUT); #endif +#ifdef TCP_REPAIR + PyModule_AddIntMacro(m, TCP_REPAIR); +#endif +#ifdef TCP_REPAIR_QUEUE + PyModule_AddIntMacro(m, TCP_REPAIR_QUEUE); +#endif +#ifdef TCP_QUEUE_SEQ + PyModule_AddIntMacro(m, TCP_QUEUE_SEQ); +#endif +#ifdef TCP_REPAIR_OPTIONS + PyModule_AddIntMacro(m, TCP_REPAIR_OPTIONS); +#endif +#ifdef TCP_FASTOPEN + PyModule_AddIntMacro(m, TCP_FASTOPEN); +#endif +#ifdef TCP_TIMESTAMP + PyModule_AddIntMacro(m, TCP_TIMESTAMP); +#endif #ifdef TCP_NOTSENT_LOWAT PyModule_AddIntMacro(m, TCP_NOTSENT_LOWAT); #endif +#ifdef TCP_CC_INFO + PyModule_AddIntMacro(m, TCP_CC_INFO); +#endif +#ifdef TCP_SAVE_SYN + PyModule_AddIntMacro(m, TCP_SAVE_SYN); +#endif +#ifdef TCP_SAVED_SYN + PyModule_AddIntMacro(m, TCP_SAVED_SYN); +#endif +#ifdef TCP_REPAIR_WINDOW + PyModule_AddIntMacro(m, TCP_REPAIR_WINDOW); +#endif +#ifdef TCP_FASTOPEN_CONNECT + PyModule_AddIntMacro(m, TCP_FASTOPEN_CONNECT); +#endif +#ifdef TCP_ULP + PyModule_AddIntMacro(m, TCP_ULP); +#endif +#ifdef TCP_MD5SIG_EXT + PyModule_AddIntMacro(m, TCP_MD5SIG_EXT); +#endif +#ifdef TCP_FASTOPEN_KEY + PyModule_AddIntMacro(m, TCP_FASTOPEN_KEY); +#endif +#ifdef TCP_FASTOPEN_NO_COOKIE + PyModule_AddIntMacro(m, TCP_FASTOPEN_NO_COOKIE); +#endif +#ifdef TCP_ZEROCOPY_RECEIVE + PyModule_AddIntMacro(m, TCP_ZEROCOPY_RECEIVE); +#endif +#ifdef TCP_INQ + PyModule_AddIntMacro(m, TCP_INQ); +#endif +#ifdef TCP_TX_DELAY + PyModule_AddIntMacro(m, TCP_TX_DELAY); +#endif /* IPX options */ #ifdef IPX_TYPE diff --git a/Modules/symtablemodule.c b/Modules/symtablemodule.c index c25ecc2b5dc7a6..4ef1d8cde07db6 100644 --- a/Modules/symtablemodule.c +++ b/Modules/symtablemodule.c @@ -56,8 +56,7 @@ _symtable_symtable_impl(PyObject *module, PyObject *source, if (st == NULL) { return NULL; } - t = (PyObject *)st->st_top; - Py_INCREF(t); + t = Py_NewRef(st->st_top); _PySymtable_Free(st); return t; } diff --git a/Modules/syslogmodule.c b/Modules/syslogmodule.c index 5137d01c6887c6..f45aa5227f1cbf 100644 --- a/Modules/syslogmodule.c +++ b/Modules/syslogmodule.c @@ -61,10 +61,16 @@ module syslog #include "clinic/syslogmodule.c.h" -/* only one instance, only one syslog, so globals should be ok */ -static PyObject *S_ident_o = NULL; /* identifier, held by openlog() */ +/* only one instance, only one syslog, so globals should be ok, + * these fields are writable from the main interpreter only. 
*/ +static PyObject *S_ident_o = NULL; // identifier, held by openlog() static char S_log_open = 0; +static inline int +is_main_interpreter(void) +{ + return (PyInterpreterState_Get() == PyInterpreterState_Main()); +} static PyObject * syslog_get_argv(void) @@ -135,6 +141,13 @@ syslog_openlog_impl(PyObject *module, PyObject *ident, long logopt, long facility) /*[clinic end generated code: output=5476c12829b6eb75 input=8a987a96a586eee7]*/ { + // Since syslog.openlog() changes the process-level state of the syslog library, + // this operation is only allowed for the main interpreter. + if (!is_main_interpreter()) { + PyErr_SetString(PyExc_RuntimeError, "subinterpreter can't use syslog.openlog()"); + return NULL; + } + const char *ident_str = NULL; if (ident) { @@ -195,6 +208,11 @@ syslog_syslog_impl(PyObject *module, int group_left_1, int priority, /* if log is not opened, open it now */ if (!S_log_open) { + if (!is_main_interpreter()) { + PyErr_SetString(PyExc_RuntimeError, "subinterpreter can't use syslog.syslog() " + "until the syslog is opened by the main interpreter"); + return NULL; + } PyObject *openlog_ret = syslog_openlog_impl(module, NULL, 0, LOG_USER); if (openlog_ret == NULL) { return NULL; @@ -205,8 +223,7 @@ syslog_syslog_impl(PyObject *module, int group_left_1, int priority, /* Incref ident, because it can be decrefed if syslog.openlog() is * called when the GIL is released. */ - PyObject *ident = S_ident_o; - Py_XINCREF(ident); + PyObject *ident = Py_XNewRef(S_ident_o); #ifdef __APPLE__ // gh-98178: On macOS, libc syslog() is not thread-safe syslog(priority, "%s", message); @@ -230,6 +247,13 @@ static PyObject * syslog_closelog_impl(PyObject *module) /*[clinic end generated code: output=97890a80a24b1b84 input=fb77a54d447acf07]*/ { + // Since syslog.closelog() changes the process-level state of the syslog library, + // this operation is only allowed for the main interpreter. + if (!is_main_interpreter()) { + PyErr_SetString(PyExc_RuntimeError, "subinterpreter can't use syslog.closelog()"); + return NULL; + } + if (PySys_Audit("syslog.closelog", NULL) < 0) { return NULL; } diff --git a/Modules/termios.c b/Modules/termios.c index 354e5ca18d04d8..fcc8f042679870 100644 --- a/Modules/termios.c +++ b/Modules/termios.c @@ -82,7 +82,12 @@ termios_tcgetattr_impl(PyObject *module, int fd) { termiosmodulestate *state = PyModule_GetState(module); struct termios mode; - if (tcgetattr(fd, &mode) == -1) { + int r; + + Py_BEGIN_ALLOW_THREADS + r = tcgetattr(fd, &mode); + Py_END_ALLOW_THREADS + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } @@ -169,7 +174,12 @@ termios_tcsetattr_impl(PyObject *module, int fd, int when, PyObject *term) /* Get the old mode, in case there are any hidden fields...
*/ termiosmodulestate *state = PyModule_GetState(module); struct termios mode; - if (tcgetattr(fd, &mode) == -1) { + int r; + + Py_BEGIN_ALLOW_THREADS + r = tcgetattr(fd, &mode); + Py_END_ALLOW_THREADS + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } @@ -211,7 +221,12 @@ termios_tcsetattr_impl(PyObject *module, int fd, int when, PyObject *term) return PyErr_SetFromErrno(state->TermiosError); if (cfsetospeed(&mode, (speed_t) ospeed) == -1) return PyErr_SetFromErrno(state->TermiosError); - if (tcsetattr(fd, when, &mode) == -1) + + Py_BEGIN_ALLOW_THREADS + r = tcsetattr(fd, when, &mode); + Py_END_ALLOW_THREADS + + if (r == -1) return PyErr_SetFromErrno(state->TermiosError); Py_RETURN_NONE; @@ -235,7 +250,13 @@ termios_tcsendbreak_impl(PyObject *module, int fd, int duration) /*[clinic end generated code: output=5945f589b5d3ac66 input=dc2f32417691f8ed]*/ { termiosmodulestate *state = PyModule_GetState(module); - if (tcsendbreak(fd, duration) == -1) { + int r; + + Py_BEGIN_ALLOW_THREADS + r = tcsendbreak(fd, duration); + Py_END_ALLOW_THREADS + + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } @@ -256,7 +277,13 @@ termios_tcdrain_impl(PyObject *module, int fd) /*[clinic end generated code: output=5fd86944c6255955 input=c99241b140b32447]*/ { termiosmodulestate *state = PyModule_GetState(module); - if (tcdrain(fd) == -1) { + int r; + + Py_BEGIN_ALLOW_THREADS + r = tcdrain(fd); + Py_END_ALLOW_THREADS + + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } @@ -282,7 +309,13 @@ termios_tcflush_impl(PyObject *module, int fd, int queue) /*[clinic end generated code: output=2424f80312ec2f21 input=0f7d08122ddc07b5]*/ { termiosmodulestate *state = PyModule_GetState(module); - if (tcflush(fd, queue) == -1) { + int r; + + Py_BEGIN_ALLOW_THREADS + r = tcflush(fd, queue); + Py_END_ALLOW_THREADS + + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } @@ -308,7 +341,13 @@ termios_tcflow_impl(PyObject *module, int fd, int action) /*[clinic end generated code: output=afd10928e6ea66eb input=c6aff0640b6efd9c]*/ { termiosmodulestate *state = PyModule_GetState(module); - if (tcflow(fd, action) == -1) { + int r; + + Py_BEGIN_ALLOW_THREADS + r = tcflow(fd, action); + Py_END_ALLOW_THREADS + + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } @@ -333,7 +372,13 @@ termios_tcgetwinsize_impl(PyObject *module, int fd) #if defined(TIOCGWINSZ) termiosmodulestate *state = PyModule_GetState(module); struct winsize w; - if (ioctl(fd, TIOCGWINSZ, &w) == -1) { + int r; + + Py_BEGIN_ALLOW_THREADS + r = ioctl(fd, TIOCGWINSZ, &w); + Py_END_ALLOW_THREADS + + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } @@ -352,7 +397,12 @@ termios_tcgetwinsize_impl(PyObject *module, int fd) #elif defined(TIOCGSIZE) termiosmodulestate *state = PyModule_GetState(module); struct ttysize s; - if (ioctl(fd, TIOCGSIZE, &s) == -1) { + int r; + + Py_BEGIN_ALLOW_THREADS + r = ioctl(fd, TIOCGSIZE, &s); + Py_END_ALLOW_THREADS + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } @@ -433,15 +483,25 @@ termios_tcsetwinsize_impl(PyObject *module, int fd, PyObject *winsz) return NULL; } - if (ioctl(fd, TIOCSWINSZ, &w) == -1) { + int r; + Py_BEGIN_ALLOW_THREADS + r = ioctl(fd, TIOCSWINSZ, &w); + Py_END_ALLOW_THREADS + + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } Py_RETURN_NONE; #elif defined(TIOCGSIZE) && defined(TIOCSSIZE) struct ttysize s; + int r; /* Get the old ttysize because it might have more fields. 
*/ - if (ioctl(fd, TIOCGSIZE, &s) == -1) { + Py_BEGIN_ALLOW_THREADS + r = ioctl(fd, TIOCGSIZE, &s); + Py_END_ALLOW_THREADS + + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } @@ -453,7 +513,11 @@ termios_tcsetwinsize_impl(PyObject *module, int fd, PyObject *winsz) return NULL; } - if (ioctl(fd, TIOCSSIZE, &s) == -1) { + Py_BEGIN_ALLOW_THREADS + r = ioctl(fd, TIOCSSIZE, &s); + Py_END_ALLOW_THREADS + + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } diff --git a/Modules/timemodule.c b/Modules/timemodule.c index 11c888af03e82d..c2bacaae0c0339 100644 --- a/Modules/timemodule.c +++ b/Modules/timemodule.c @@ -62,6 +62,56 @@ #define SEC_TO_NS (1000 * 1000 * 1000) +#if defined(HAVE_TIMES) || defined(HAVE_CLOCK) +static int +check_ticks_per_second(long tps, const char *context) +{ + /* Effectively, check that _PyTime_MulDiv(t, SEC_TO_NS, ticks_per_second) + cannot overflow. */ + if (tps >= 0 && (_PyTime_t)tps > _PyTime_MAX / SEC_TO_NS) { + PyErr_Format(PyExc_OverflowError, "%s is too large", context); + return -1; + } + return 0; +} +#endif /* HAVE_TIMES || HAVE_CLOCK */ + +#ifdef HAVE_TIMES + +# define ticks_per_second _PyRuntime.time.ticks_per_second + +static void +ensure_ticks_per_second(void) +{ + if (_PyRuntime.time.ticks_per_second_initialized) { + return; + } + _PyRuntime.time.ticks_per_second_initialized = 1; +# if defined(HAVE_SYSCONF) && defined(_SC_CLK_TCK) + ticks_per_second = sysconf(_SC_CLK_TCK); + if (ticks_per_second < 1) { + ticks_per_second = -1; + } +# elif defined(HZ) + ticks_per_second = HZ; +# else + ticks_per_second = 60; /* magic fallback value; may be bogus */ +# endif +} + +#endif /* HAVE_TIMES */ + + +PyStatus +_PyTime_Init(void) +{ +#ifdef HAVE_TIMES + ensure_ticks_per_second(); +#endif + return PyStatus_Ok(); +} + + /* Forward declarations */ static int pysleep(_PyTime_t timeout); @@ -140,18 +190,8 @@ Return the current time in nanoseconds since the Epoch."); static int _PyTime_GetClockWithInfo(_PyTime_t *tp, _Py_clock_info_t *info) { - static int initialized = 0; - - if (!initialized) { - initialized = 1; - - /* Make sure that _PyTime_MulDiv(ticks, SEC_TO_NS, CLOCKS_PER_SEC) - above cannot overflow */ - if ((_PyTime_t)CLOCKS_PER_SEC > _PyTime_MAX / SEC_TO_NS) { - PyErr_SetString(PyExc_OverflowError, - "CLOCKS_PER_SEC is too large"); - return -1; - } + if (check_ticks_per_second(CLOCKS_PER_SEC, "CLOCKS_PER_SEC") < 0) { + return -1; } if (info) { @@ -1308,36 +1348,10 @@ _PyTime_GetProcessTimeWithInfo(_PyTime_t *tp, _Py_clock_info_t *info) struct tms t; if (times(&t) != (clock_t)-1) { - static long ticks_per_second = -1; - - if (ticks_per_second == -1) { - long freq; -#if defined(HAVE_SYSCONF) && defined(_SC_CLK_TCK) - freq = sysconf(_SC_CLK_TCK); - if (freq < 1) { - freq = -1; - } -#elif defined(HZ) - freq = HZ; -#else - freq = 60; /* magic fallback value; may be bogus */ -#endif - - if (freq != -1) { - /* check that _PyTime_MulDiv(t, SEC_TO_NS, ticks_per_second) - cannot overflow below */ -#if LONG_MAX > _PyTime_MAX / SEC_TO_NS - if ((_PyTime_t)freq > _PyTime_MAX / SEC_TO_NS) { - PyErr_SetString(PyExc_OverflowError, - "_SC_CLK_TCK is too large"); - return -1; - } -#endif - - ticks_per_second = freq; - } + assert(_PyRuntime.time.ticks_per_second_initialized); + if (check_ticks_per_second(ticks_per_second, "_SC_CLK_TCK") < 0) { + return -1; } - if (ticks_per_second != -1) { if (info) { info->implementation = "times()"; diff --git a/Modules/unicodedata.c b/Modules/unicodedata.c index 32014707bce747..59fccd4b834dd3 100644 --- 
a/Modules/unicodedata.c +++ b/Modules/unicodedata.c @@ -159,8 +159,7 @@ unicodedata_UCD_decimal_impl(PyObject *self, int chr, return NULL; } else { - Py_INCREF(default_value); - return default_value; + return Py_NewRef(default_value); } } return PyLong_FromLong(rc); @@ -194,8 +193,7 @@ unicodedata_UCD_digit_impl(PyObject *self, int chr, PyObject *default_value) return NULL; } else { - Py_INCREF(default_value); - return default_value; + return Py_NewRef(default_value); } } return PyLong_FromLong(rc); @@ -246,8 +244,7 @@ unicodedata_UCD_numeric_impl(PyObject *self, int chr, return NULL; } else { - Py_INCREF(default_value); - return default_value; + return Py_NewRef(default_value); } } return PyFloat_FromDouble(rc); @@ -917,8 +914,7 @@ unicodedata_UCD_is_normalized_impl(PyObject *self, PyObject *form, result = (m == YES) ? Py_True : Py_False; } - Py_INCREF(result); - return result; + return Py_NewRef(result); } @@ -943,39 +939,34 @@ unicodedata_UCD_normalize_impl(PyObject *self, PyObject *form, if (PyUnicode_GET_LENGTH(input) == 0) { /* Special case empty input strings, since resizing them later would cause internal errors. */ - Py_INCREF(input); - return input; + return Py_NewRef(input); } if (PyUnicode_CompareWithASCIIString(form, "NFC") == 0) { if (is_normalized_quickcheck(self, input, true, false, true) == YES) { - Py_INCREF(input); - return input; + return Py_NewRef(input); } return nfc_nfkc(self, input, 0); } if (PyUnicode_CompareWithASCIIString(form, "NFKC") == 0) { if (is_normalized_quickcheck(self, input, true, true, true) == YES) { - Py_INCREF(input); - return input; + return Py_NewRef(input); } return nfc_nfkc(self, input, 1); } if (PyUnicode_CompareWithASCIIString(form, "NFD") == 0) { if (is_normalized_quickcheck(self, input, false, false, true) == YES) { - Py_INCREF(input); - return input; + return Py_NewRef(input); } return nfd_nfkd(self, input, 0); } if (PyUnicode_CompareWithASCIIString(form, "NFKD") == 0) { if (is_normalized_quickcheck(self, input, false, true, true) == YES) { - Py_INCREF(input); - return input; + return Py_NewRef(input); } return nfd_nfkd(self, input, 1); } @@ -1370,8 +1361,7 @@ unicodedata_UCD_name_impl(PyObject *self, int chr, PyObject *default_value) return NULL; } else { - Py_INCREF(default_value); - return default_value; + return Py_NewRef(default_value); } } diff --git a/Modules/xxlimited.c b/Modules/xxlimited.c index e234504e331945..5f5297ba6337af 100644 --- a/Modules/xxlimited.c +++ b/Modules/xxlimited.c @@ -155,8 +155,7 @@ Xxo_getattro(XxoObject *self, PyObject *name) if (self->x_attr != NULL) { PyObject *v = PyDict_GetItemWithError(self->x_attr, name); if (v != NULL) { - Py_INCREF(v); - return v; + return Py_NewRef(v); } else if (PyErr_Occurred()) { return NULL; @@ -210,18 +209,15 @@ Xxo_demo(XxoObject *self, PyTypeObject *defining_class, /* Test if the argument is "str" */ if (PyUnicode_Check(o)) { - Py_INCREF(o); - return o; + return Py_NewRef(o); } /* test if the argument is of the Xxo class */ if (PyObject_TypeCheck(o, defining_class)) { - Py_INCREF(o); - return o; + return Py_NewRef(o); } - Py_INCREF(Py_None); - return Py_None; + return Py_NewRef(Py_None); } static PyMethodDef Xxo_methods[] = { diff --git a/Modules/xxlimited_35.c b/Modules/xxlimited_35.c index 8d29c71951768a..361c7e76d77f50 100644 --- a/Modules/xxlimited_35.c +++ b/Modules/xxlimited_35.c @@ -64,11 +64,9 @@ Xxo_demo(XxoObject *self, PyObject *args) return NULL; /* Test availability of fast type checks */ if (o != NULL && PyUnicode_Check(o)) { - Py_INCREF(o); - return o; + return 
Py_NewRef(o); } - Py_INCREF(Py_None); - return Py_None; + return Py_NewRef(Py_None); } static PyMethodDef Xxo_methods[] = { @@ -83,8 +81,7 @@ Xxo_getattro(XxoObject *self, PyObject *name) if (self->x_attr != NULL) { PyObject *v = PyDict_GetItemWithError(self->x_attr, name); if (v != NULL) { - Py_INCREF(v); - return v; + return Py_NewRef(v); } else if (PyErr_Occurred()) { return NULL; @@ -176,8 +173,7 @@ xx_roj(PyObject *self, PyObject *args) long b; if (!PyArg_ParseTuple(args, "O#:roj", &a, &b)) return NULL; - Py_INCREF(Py_None); - return Py_None; + return Py_NewRef(Py_None); } diff --git a/Modules/xxmodule.c b/Modules/xxmodule.c index a6e5071d1d6303..a676fdb4ec773a 100644 --- a/Modules/xxmodule.c +++ b/Modules/xxmodule.c @@ -52,8 +52,7 @@ Xxo_demo(XxoObject *self, PyObject *args) { if (!PyArg_ParseTuple(args, ":demo")) return NULL; - Py_INCREF(Py_None); - return Py_None; + return Py_NewRef(Py_None); } static PyMethodDef Xxo_methods[] = { @@ -68,8 +67,7 @@ Xxo_getattro(XxoObject *self, PyObject *name) if (self->x_attr != NULL) { PyObject *v = PyDict_GetItemWithError(self->x_attr, name); if (v != NULL) { - Py_INCREF(v); - return v; + return Py_NewRef(v); } else if (PyErr_Occurred()) { return NULL; @@ -195,8 +193,7 @@ xx_bug(PyObject *self, PyObject *args) printf("\n"); /* Py_DECREF(item); */ - Py_INCREF(Py_None); - return Py_None; + return Py_NewRef(Py_None); } /* Test bad format character */ @@ -208,8 +205,7 @@ xx_roj(PyObject *self, PyObject *args) long b; if (!PyArg_ParseTuple(args, "O#:roj", &a, &b)) return NULL; - Py_INCREF(Py_None); - return Py_None; + return Py_NewRef(Py_None); } @@ -266,8 +262,7 @@ static PyTypeObject Str_Type = { static PyObject * null_richcompare(PyObject *self, PyObject *other, int op) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + return Py_NewRef(Py_NotImplemented); } static PyTypeObject Null_Type = { diff --git a/Modules/xxsubtype.c b/Modules/xxsubtype.c index 12306f2fc5247c..8512baf7cd0a2d 100644 --- a/Modules/xxsubtype.c +++ b/Modules/xxsubtype.c @@ -39,8 +39,7 @@ spamlist_setstate(spamlistobject *self, PyObject *args) if (!PyArg_ParseTuple(args, "i:setstate", &state)) return NULL; self->state = state; - Py_INCREF(Py_None); - return Py_None; + return Py_NewRef(Py_None); } static PyObject * @@ -53,12 +52,9 @@ spamlist_specialmeth(PyObject *self, PyObject *args, PyObject *kw) self = Py_None; if (kw == NULL) kw = Py_None; - Py_INCREF(self); - PyTuple_SET_ITEM(result, 0, self); - Py_INCREF(args); - PyTuple_SET_ITEM(result, 1, args); - Py_INCREF(kw); - PyTuple_SET_ITEM(result, 2, kw); + PyTuple_SET_ITEM(result, 0, Py_NewRef(self)); + PyTuple_SET_ITEM(result, 1, Py_NewRef(args)); + PyTuple_SET_ITEM(result, 2, Py_NewRef(kw)); } return result; } @@ -164,8 +160,7 @@ spamdict_setstate(spamdictobject *self, PyObject *args) if (!PyArg_ParseTuple(args, "i:setstate", &state)) return NULL; self->state = state; - Py_INCREF(Py_None); - return Py_None; + return Py_NewRef(Py_None); } static PyMethodDef spamdict_methods[] = { @@ -279,14 +274,12 @@ xxsubtype_exec(PyObject* m) if (PyType_Ready(&spamdict_type) < 0) return -1; - Py_INCREF(&spamlist_type); if (PyModule_AddObject(m, "spamlist", - (PyObject *) &spamlist_type) < 0) + Py_NewRef(&spamlist_type)) < 0) return -1; - Py_INCREF(&spamdict_type); if (PyModule_AddObject(m, "spamdict", - (PyObject *) &spamdict_type) < 0) + Py_NewRef(&spamdict_type)) < 0) return -1; return 0; } diff --git a/Modules/zlibmodule.c b/Modules/zlibmodule.c index 2a490ed5d183a7..1cdfd01320288b 100644 --- a/Modules/zlibmodule.c +++ 
b/Modules/zlibmodule.c @@ -671,8 +671,7 @@ zlib_decompressobj_impl(PyObject *module, int wbits, PyObject *zdict) self->zst.next_in = NULL; self->zst.avail_in = 0; if (zdict != NULL) { - Py_INCREF(zdict); - self->zdict = zdict; + self->zdict = Py_NewRef(zdict); } int err = inflateInit2(&self->zst, wbits); switch (err) { @@ -1089,12 +1088,9 @@ zlib_Compress_copy_impl(compobject *self, PyTypeObject *cls) zlib_error(state, self->zst, err, "while copying compression object"); goto error; } - Py_INCREF(self->unused_data); - Py_XSETREF(return_value->unused_data, self->unused_data); - Py_INCREF(self->unconsumed_tail); - Py_XSETREF(return_value->unconsumed_tail, self->unconsumed_tail); - Py_XINCREF(self->zdict); - Py_XSETREF(return_value->zdict, self->zdict); + Py_XSETREF(return_value->unused_data, Py_NewRef(self->unused_data)); + Py_XSETREF(return_value->unconsumed_tail, Py_NewRef(self->unconsumed_tail)); + Py_XSETREF(return_value->zdict, Py_XNewRef(self->zdict)); return_value->eof = self->eof; /* Mark it as being initialized */ @@ -1177,12 +1173,9 @@ zlib_Decompress_copy_impl(compobject *self, PyTypeObject *cls) goto error; } - Py_INCREF(self->unused_data); - Py_XSETREF(return_value->unused_data, self->unused_data); - Py_INCREF(self->unconsumed_tail); - Py_XSETREF(return_value->unconsumed_tail, self->unconsumed_tail); - Py_XINCREF(self->zdict); - Py_XSETREF(return_value->zdict, self->zdict); + Py_XSETREF(return_value->unused_data, Py_NewRef(self->unused_data)); + Py_XSETREF(return_value->unconsumed_tail, Py_NewRef(self->unconsumed_tail)); + Py_XSETREF(return_value->zdict, Py_XNewRef(self->zdict)); return_value->eof = self->eof; /* Mark it as being initialized */ @@ -1440,11 +1433,11 @@ arrange_output_buffer_with_maximum(uint32_t *avail_out, return length; } -/* Decompress data of length self->avail_in_real in self->state.next_in. The - output buffer is allocated dynamically and returned. If the max_length is - of sufficiently low size, max_length is allocated immediately. At most - max_length bytes are returned, so some of the input may not be consumed. - self->state.next_in and self->avail_in_real are updated to reflect the +/* Decompress data of length self->avail_in_real in self->state.next_in. The + output buffer is allocated dynamically and returned. If the max_length is + of sufficiently low size, max_length is allocated immediately. At most + max_length bytes are returned, so some of the input may not be consumed. + self->state.next_in and self->avail_in_real are updated to reflect the consumed input. */ static PyObject* decompress_buf(ZlibDecompressor *self, Py_ssize_t max_length) @@ -1456,11 +1449,11 @@ decompress_buf(ZlibDecompressor *self, Py_ssize_t max_length) Py_ssize_t hard_limit; Py_ssize_t obuflen; zlibstate *state = PyType_GetModuleState(Py_TYPE(self)); - + int err = Z_OK; - /* When sys.maxsize is passed as default use DEF_BUF_SIZE as start buffer. - In this particular case the data may not necessarily be very big, so + /* When sys.maxsize is passed as default use DEF_BUF_SIZE as start buffer. 
+ In this particular case the data may not necessarily be very big, so it is better to grow dynamically.*/ if ((max_length < 0) || max_length == PY_SSIZE_T_MAX) { hard_limit = PY_SSIZE_T_MAX; @@ -1544,7 +1537,7 @@ decompress_buf(ZlibDecompressor *self, Py_ssize_t max_length) static PyObject * -decompress(ZlibDecompressor *self, uint8_t *data, +decompress(ZlibDecompressor *self, uint8_t *data, size_t len, Py_ssize_t max_length) { bool input_buffer_in_use; @@ -1713,12 +1706,12 @@ PyDoc_STRVAR(ZlibDecompressor__new____doc__, "\n"); static PyObject * -ZlibDecompressor__new__(PyTypeObject *cls, - PyObject *args, +ZlibDecompressor__new__(PyTypeObject *cls, + PyObject *args, PyObject *kwargs) { static char *keywords[] = {"wbits", "zdict", NULL}; - static char *format = "|iO:_ZlibDecompressor"; + static const char * const format = "|iO:_ZlibDecompressor"; int wbits = MAX_WBITS; PyObject *zdict = NULL; zlibstate *state = PyType_GetModuleState(cls); @@ -1727,16 +1720,13 @@ ZlibDecompressor__new__(PyTypeObject *cls, args, kwargs, format, keywords, &wbits, &zdict)) { return NULL; } - ZlibDecompressor *self = PyObject_New(ZlibDecompressor, cls); + ZlibDecompressor *self = PyObject_New(ZlibDecompressor, cls); self->eof = 0; self->needs_input = 1; self->avail_in_real = 0; self->input_buffer = NULL; self->input_buffer_size = 0; - if (zdict != NULL) { - Py_INCREF(zdict); - } - self->zdict = zdict; + self->zdict = Py_XNewRef(zdict); self->zst.opaque = NULL; self->zst.zalloc = PyZlib_Malloc; self->zst.zfree = PyZlib_Free; @@ -2042,14 +2032,12 @@ zlib_exec(PyObject *mod) return -1; } - Py_INCREF(state->ZlibError); - if (PyModule_AddObject(mod, "error", state->ZlibError) < 0) { + if (PyModule_AddObject(mod, "error", Py_NewRef(state->ZlibError)) < 0) { Py_DECREF(state->ZlibError); return -1; } - Py_INCREF(state->ZlibDecompressorType); - if (PyModule_AddObject(mod, "_ZlibDecompressor", - (PyObject *)state->ZlibDecompressorType) < 0) { + if (PyModule_AddObject(mod, "_ZlibDecompressor", + Py_NewRef(state->ZlibDecompressorType)) < 0) { Py_DECREF(state->ZlibDecompressorType); return -1; } diff --git a/Objects/abstract.c b/Objects/abstract.c index 5d50491b2dd347..9dc74fb9c2608c 100644 --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -45,8 +45,7 @@ PyObject_Type(PyObject *o) } v = (PyObject *)Py_TYPE(o); - Py_INCREF(v); - return v; + return Py_NewRef(v); } Py_ssize_t @@ -722,9 +721,7 @@ PyBuffer_FillInfo(Py_buffer *view, PyObject *obj, void *buf, Py_ssize_t len, return -1; } - view->obj = obj; - if (obj) - Py_INCREF(obj); + view->obj = Py_XNewRef(obj); view->buf = buf; view->len = len; view->readonly = readonly; @@ -776,8 +773,7 @@ PyObject_Format(PyObject *obj, PyObject *format_spec) /* Fast path for common types. 
*/ if (format_spec == NULL || PyUnicode_GET_LENGTH(format_spec) == 0) { if (PyUnicode_CheckExact(obj)) { - Py_INCREF(obj); - return obj; + return Py_NewRef(obj); } if (PyLong_CheckExact(obj)) { return PyObject_Str(obj); @@ -810,8 +806,7 @@ PyObject_Format(PyObject *obj, PyObject *format_spec) PyErr_Format(PyExc_TypeError, "__format__ must return a str, not %.200s", Py_TYPE(result)->tp_name); - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); goto done; } @@ -1405,8 +1400,7 @@ _PyNumber_Index(PyObject *item) } if (PyLong_Check(item)) { - Py_INCREF(item); - return item; + return Py_NewRef(item); } if (!_PyIndex_Check(item)) { PyErr_Format(PyExc_TypeError, @@ -1520,8 +1514,7 @@ PyNumber_Long(PyObject *o) } if (PyLong_CheckExact(o)) { - Py_INCREF(o); - return o; + return Py_NewRef(o); } m = Py_TYPE(o)->tp_as_number; if (m && m->nb_int) { /* This should include subclasses of int */ @@ -2045,8 +2038,7 @@ PySequence_Tuple(PyObject *v) a tuple *subclass* instance as-is, hence the restriction to exact tuples here. In contrast, lists always make a copy, so there's no need for exactness below. */ - Py_INCREF(v); - return v; + return Py_NewRef(v); } if (PyList_CheckExact(v)) return PyList_AsTuple(v); @@ -2144,8 +2136,7 @@ PySequence_Fast(PyObject *v, const char *m) } if (PyList_CheckExact(v) || PyTuple_CheckExact(v)) { - Py_INCREF(v); - return v; + return Py_NewRef(v); } it = PyObject_GetIter(v); @@ -2799,8 +2790,7 @@ PyObject_GetIter(PyObject *o) "iter() returned non-iterator " "of type '%.100s'", Py_TYPE(res)->tp_name); - Py_DECREF(res); - res = NULL; + Py_SETREF(res, NULL); } return res; } @@ -2820,8 +2810,7 @@ PyObject_GetAIter(PyObject *o) { PyErr_Format(PyExc_TypeError, "aiter() returned not an async iterator of type '%.100s'", Py_TYPE(it)->tp_name); - Py_DECREF(it); - it = NULL; + Py_SETREF(it, NULL); } return it; } diff --git a/Objects/boolobject.c b/Objects/boolobject.c index 8a20e368d4a42b..18666f88cbde10 100644 --- a/Objects/boolobject.c +++ b/Objects/boolobject.c @@ -23,8 +23,7 @@ PyObject *PyBool_FromLong(long ok) result = Py_True; else result = Py_False; - Py_INCREF(result); - return result; + return Py_NewRef(result); } /* We define bool_new to always return either Py_True or Py_False */ diff --git a/Objects/bytearrayobject.c b/Objects/bytearrayobject.c index b2962fd137d93e..072089e39aa207 100644 --- a/Objects/bytearrayobject.c +++ b/Objects/bytearrayobject.c @@ -313,8 +313,7 @@ bytearray_iconcat(PyByteArrayObject *self, PyObject *other) } memcpy(PyByteArray_AS_STRING(self) + size, vo.buf, vo.len); PyBuffer_Release(&vo); - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } static PyObject * @@ -340,8 +339,7 @@ bytearray_irepeat(PyByteArrayObject *self, Py_ssize_t count) if (count < 0) count = 0; else if (count == 1) { - Py_INCREF(self); - return (PyObject*)self; + return Py_NewRef(self); } const Py_ssize_t mysize = Py_SIZE(self); @@ -354,8 +352,7 @@ bytearray_irepeat(PyByteArrayObject *self, Py_ssize_t count) char* buf = PyByteArray_AS_STRING(self); _PyBytes_Repeat(buf, size, buf, mysize); - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } static PyObject * @@ -2015,7 +2012,7 @@ bytearray_join(PyByteArrayObject *self, PyObject *iterable_of_bytes) /*[clinic input] bytearray.splitlines - keepends: bool(accept={int}) = False + keepends: bool = False Return a list of the lines in the bytearray, breaking at line boundaries. @@ -2025,7 +2022,7 @@ true. 
static PyObject * bytearray_splitlines_impl(PyByteArrayObject *self, int keepends) -/*[clinic end generated code: output=4223c94b895f6ad9 input=99a27ad959b9cf6b]*/ +/*[clinic end generated code: output=4223c94b895f6ad9 input=66b2dcdea8d093bf]*/ { return stringlib_splitlines( (PyObject*) self, PyByteArray_AS_STRING(self), @@ -2154,10 +2151,9 @@ static PyObject * bytearray_sizeof_impl(PyByteArrayObject *self) /*[clinic end generated code: output=738abdd17951c427 input=e27320fd98a4bc5a]*/ { - Py_ssize_t res; - - res = _PyObject_SIZE(Py_TYPE(self)) + self->ob_alloc * sizeof(char); - return PyLong_FromSsize_t(res); + size_t res = _PyObject_SIZE(Py_TYPE(self)); + res += (size_t)self->ob_alloc * sizeof(char); + return PyLong_FromSize_t(res); } static PySequenceMethods bytearray_as_sequence = { @@ -2477,8 +2473,7 @@ bytearray_iter(PyObject *seq) if (it == NULL) return NULL; it->it_index = 0; - Py_INCREF(seq); - it->it_seq = (PyByteArrayObject *)seq; + it->it_seq = (PyByteArrayObject *)Py_NewRef(seq); _PyObject_GC_TRACK(it); return (PyObject *)it; } diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c index 80660881920fb7..0fd10fa00d16fa 100644 --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -53,8 +53,7 @@ static inline PyObject* bytes_get_empty(void) // Return a strong reference to the empty bytes string singleton. static inline PyObject* bytes_new_empty(void) { - Py_INCREF(EMPTY); - return (PyObject *)EMPTY; + return Py_NewRef(EMPTY); } @@ -126,8 +125,7 @@ PyBytes_FromStringAndSize(const char *str, Py_ssize_t size) } if (size == 1 && str != NULL) { op = CHARACTER(*str & 255); - Py_INCREF(op); - return (PyObject *)op; + return Py_NewRef(op); } if (size == 0) { return bytes_new_empty(); @@ -162,8 +160,7 @@ PyBytes_FromString(const char *str) } else if (size == 1) { op = CHARACTER(*str & 255); - Py_INCREF(op); - return (PyObject *)op; + return Py_NewRef(op); } /* Inline PyObject_NewVar */ @@ -527,14 +524,12 @@ format_obj(PyObject *v, const char **pbuf, Py_ssize_t *plen) if (PyBytes_Check(v)) { *pbuf = PyBytes_AS_STRING(v); *plen = PyBytes_GET_SIZE(v); - Py_INCREF(v); - return v; + return Py_NewRef(v); } if (PyByteArray_Check(v)) { *pbuf = PyByteArray_AS_STRING(v); *plen = PyByteArray_GET_SIZE(v); - Py_INCREF(v); - return v; + return Py_NewRef(v); } /* does it support __bytes__? 
*/ func = _PyObject_LookupSpecial(v, &_Py_ID(__bytes__)); @@ -1411,13 +1406,11 @@ bytes_concat(PyObject *a, PyObject *b) /* Optimize end cases */ if (va.len == 0 && PyBytes_CheckExact(b)) { - result = b; - Py_INCREF(result); + result = Py_NewRef(b); goto done; } if (vb.len == 0 && PyBytes_CheckExact(a)) { - result = a; - Py_INCREF(result); + result = Py_NewRef(a); goto done; } @@ -1458,8 +1451,7 @@ bytes_repeat(PyBytesObject *a, Py_ssize_t n) } size = Py_SIZE(a) * n; if (size == Py_SIZE(a) && PyBytes_CheckExact(a)) { - Py_INCREF(a); - return (PyObject *)a; + return Py_NewRef(a); } nbytes = (size_t)size; if (nbytes + PyBytesObject_SIZE <= nbytes) { @@ -1625,8 +1617,7 @@ bytes_subscript(PyBytesObject* self, PyObject* item) else if (start == 0 && step == 1 && slicelength == PyBytes_GET_SIZE(self) && PyBytes_CheckExact(self)) { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } else if (step == 1) { return PyBytes_FromStringAndSize( @@ -1696,8 +1687,7 @@ bytes___bytes___impl(PyBytesObject *self) /*[clinic end generated code: output=63a306a9bc0caac5 input=34ec5ddba98bd6bb]*/ { if (PyBytes_CheckExact(self)) { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } else { return PyBytes_FromStringAndSize(self->ob_sval, Py_SIZE(self)); @@ -1922,8 +1912,7 @@ do_xstrip(PyBytesObject *self, int striptype, PyObject *sepobj) PyBuffer_Release(&vsep); if (i == 0 && j == len && PyBytes_CheckExact(self)) { - Py_INCREF(self); - return (PyObject*)self; + return Py_NewRef(self); } else return PyBytes_FromStringAndSize(s+i, j-i); @@ -1952,8 +1941,7 @@ do_strip(PyBytesObject *self, int striptype) } if (i == 0 && j == len && PyBytes_CheckExact(self)) { - Py_INCREF(self); - return (PyObject*)self; + return Py_NewRef(self); } else return PyBytes_FromStringAndSize(s+i, j-i); @@ -2121,9 +2109,7 @@ bytes_translate_impl(PyBytesObject *self, PyObject *table, changed = 1; } if (!changed && PyBytes_CheckExact(input_obj)) { - Py_INCREF(input_obj); - Py_DECREF(result); - result = input_obj; + Py_SETREF(result, Py_NewRef(input_obj)); } PyBuffer_Release(&del_table_view); PyBuffer_Release(&table_view); @@ -2152,8 +2138,7 @@ bytes_translate_impl(PyBytesObject *self, PyObject *table, } if (!changed && PyBytes_CheckExact(input_obj)) { Py_DECREF(result); - Py_INCREF(input_obj); - return input_obj; + return Py_NewRef(input_obj); } /* Fix the size of the resulting byte string */ if (inlen > 0) @@ -2245,8 +2230,7 @@ bytes_removeprefix_impl(PyBytesObject *self, Py_buffer *prefix) } if (PyBytes_CheckExact(self)) { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } return PyBytes_FromStringAndSize(self_start, self_len); @@ -2284,8 +2268,7 @@ bytes_removesuffix_impl(PyBytesObject *self, Py_buffer *suffix) } if (PyBytes_CheckExact(self)) { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } return PyBytes_FromStringAndSize(self_start, self_len); @@ -2331,7 +2314,7 @@ bytes_decode_impl(PyBytesObject *self, const char *encoding, /*[clinic input] bytes.splitlines - keepends: bool(accept={int}) = False + keepends: bool = False Return a list of the lines in the bytes, breaking at line boundaries. @@ -2341,7 +2324,7 @@ true. 
static PyObject * bytes_splitlines_impl(PyBytesObject *self, int keepends) -/*[clinic end generated code: output=3484149a5d880ffb input=a8b32eb01ff5a5ed]*/ +/*[clinic end generated code: output=3484149a5d880ffb input=5d7b898af2fe55c0]*/ { return stringlib_splitlines( (PyObject*) self, PyBytes_AS_STRING(self), @@ -2844,8 +2827,7 @@ PyBytes_FromObject(PyObject *x) } if (PyBytes_CheckExact(x)) { - Py_INCREF(x); - return x; + return Py_NewRef(x); } /* Use the modern buffer interface */ @@ -3269,8 +3251,7 @@ bytes_iter(PyObject *seq) if (it == NULL) return NULL; it->it_index = 0; - Py_INCREF(seq); - it->it_seq = (PyBytesObject *)seq; + it->it_seq = (PyBytesObject *)Py_NewRef(seq); _PyObject_GC_TRACK(it); return (PyObject *)it; } diff --git a/Objects/call.c b/Objects/call.c index c2509db2a9a263..bd027e41f8a9a5 100644 --- a/Objects/call.c +++ b/Objects/call.c @@ -1,6 +1,7 @@ #include "Python.h" #include "pycore_call.h" // _PyObject_CallNoArgsTstate() #include "pycore_ceval.h" // _Py_EnterRecursiveCallTstate() +#include "pycore_dict.h" // _PyDict_FromItems() #include "pycore_object.h" // _PyCFunctionWithKeywords_TrampolineCall() #include "pycore_pyerrors.h" // _PyErr_Occurred() #include "pycore_pystate.h" // _PyThreadState_GET() @@ -1000,8 +1001,7 @@ _PyStack_UnpackDict(PyThreadState *tstate, /* Copy positional arguments */ for (Py_ssize_t i = 0; i < nargs; i++) { - Py_INCREF(args[i]); - stack[i] = args[i]; + stack[i] = Py_NewRef(args[i]); } PyObject **kwstack = stack + nargs; @@ -1013,10 +1013,8 @@ _PyStack_UnpackDict(PyThreadState *tstate, unsigned long keys_are_strings = Py_TPFLAGS_UNICODE_SUBCLASS; while (PyDict_Next(kwargs, &pos, &key, &value)) { keys_are_strings &= Py_TYPE(key)->tp_flags; - Py_INCREF(key); - Py_INCREF(value); - PyTuple_SET_ITEM(kwnames, i, key); - kwstack[i] = value; + PyTuple_SET_ITEM(kwnames, i, Py_NewRef(key)); + kwstack[i] = Py_NewRef(value); i++; } diff --git a/Objects/capsule.c b/Objects/capsule.c index 606e50e6961133..baaddb3f1f0849 100644 --- a/Objects/capsule.c +++ b/Objects/capsule.c @@ -220,8 +220,7 @@ PyCapsule_Import(const char *name, int no_block) } } else { PyObject *object2 = PyObject_GetAttrString(object, trace); - Py_DECREF(object); - object = object2; + Py_SETREF(object, object2); } if (!object) { goto EXIT; diff --git a/Objects/cellobject.c b/Objects/cellobject.c index 1ddf4c5d8b8bdc..f516707f6f8086 100644 --- a/Objects/cellobject.c +++ b/Objects/cellobject.c @@ -11,8 +11,7 @@ PyCell_New(PyObject *obj) op = (PyCellObject *)PyObject_GC_New(PyCellObject, &PyCell_Type); if (op == NULL) return NULL; - op->ob_ref = obj; - Py_XINCREF(obj); + op->ob_ref = Py_XNewRef(obj); _PyObject_GC_TRACK(op); return (PyObject *)op; @@ -68,8 +67,7 @@ PyCell_Set(PyObject *op, PyObject *value) return -1; } PyObject *old_value = PyCell_GET(op); - Py_XINCREF(value); - PyCell_SET(op, value); + PyCell_SET(op, Py_XNewRef(value)); Py_XDECREF(old_value); return 0; } @@ -135,15 +133,13 @@ cell_get_contents(PyCellObject *op, void *closure) PyErr_SetString(PyExc_ValueError, "Cell is empty"); return NULL; } - Py_INCREF(op->ob_ref); - return op->ob_ref; + return Py_NewRef(op->ob_ref); } static int cell_set_contents(PyCellObject *op, PyObject *obj, void *Py_UNUSED(ignored)) { - Py_XINCREF(obj); - Py_XSETREF(op->ob_ref, obj); + Py_XSETREF(op->ob_ref, Py_XNewRef(obj)); return 0; } diff --git a/Objects/classobject.c b/Objects/classobject.c index b9708ba0e41a3b..2cb192e725d40d 100644 --- a/Objects/classobject.c +++ b/Objects/classobject.c @@ -113,10 +113,8 @@ PyMethod_New(PyObject *func, 
PyObject *self) return NULL; } im->im_weakreflist = NULL; - Py_INCREF(func); - im->im_func = func; - Py_INCREF(self); - im->im_self = self; + im->im_func = Py_NewRef(func); + im->im_self = Py_NewRef(self); im->vectorcall = method_vectorcall; _PyObject_GC_TRACK(im); return (PyObject *)im; @@ -195,8 +193,7 @@ method_getattro(PyObject *obj, PyObject *name) if (f != NULL) return f(descr, obj, (PyObject *)Py_TYPE(obj)); else { - Py_INCREF(descr); - return descr; + return Py_NewRef(descr); } } @@ -267,8 +264,7 @@ method_richcompare(PyObject *self, PyObject *other, int op) res = eq ? Py_True : Py_False; else res = eq ? Py_False : Py_True; - Py_INCREF(res); - return res; + return Py_NewRef(res); } static PyObject * @@ -287,8 +283,7 @@ method_repr(PyMethodObject *a) } if (funcname != NULL && !PyUnicode_Check(funcname)) { - Py_DECREF(funcname); - funcname = NULL; + Py_SETREF(funcname, NULL); } /* XXX Shouldn't use repr()/%R here! */ @@ -359,8 +354,7 @@ PyInstanceMethod_New(PyObject *func) { method = PyObject_GC_New(PyInstanceMethodObject, &PyInstanceMethod_Type); if (method == NULL) return NULL; - Py_INCREF(func); - method->func = func; + method->func = Py_NewRef(func); _PyObject_GC_TRACK(method); return (PyObject *)method; } @@ -412,8 +406,7 @@ instancemethod_getattro(PyObject *self, PyObject *name) if (f != NULL) return f(descr, self, (PyObject *)Py_TYPE(self)); else { - Py_INCREF(descr); - return descr; + return Py_NewRef(descr); } } @@ -443,8 +436,7 @@ static PyObject * instancemethod_descr_get(PyObject *descr, PyObject *obj, PyObject *type) { PyObject *func = PyInstanceMethod_GET_FUNCTION(descr); if (obj == NULL) { - Py_INCREF(func); - return func; + return Py_NewRef(func); } else return PyMethod_New(func, obj); @@ -472,8 +464,7 @@ instancemethod_richcompare(PyObject *self, PyObject *other, int op) res = eq ? Py_True : Py_False; else res = eq ? 
Py_False : Py_True; - Py_INCREF(res); - return res; + return Py_NewRef(res); } static PyObject * @@ -492,8 +483,7 @@ instancemethod_repr(PyObject *self) return NULL; } if (funcname != NULL && !PyUnicode_Check(funcname)) { - Py_DECREF(funcname); - funcname = NULL; + Py_SETREF(funcname, NULL); } result = PyUnicode_FromFormat("<instancemethod %V at %p>", diff --git a/Objects/clinic/bytearrayobject.c.h b/Objects/clinic/bytearrayobject.c.h index 142f2998160725..e7bf3183af8522 100644 --- a/Objects/clinic/bytearrayobject.c.h +++ b/Objects/clinic/bytearrayobject.c.h @@ -1084,8 +1084,8 @@ bytearray_splitlines(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t if (!noptargs) { goto skip_optional_pos; } - keepends = _PyLong_AsInt(args[0]); - if (keepends == -1 && PyErr_Occurred()) { + keepends = PyObject_IsTrue(args[0]); + if (keepends < 0) { goto exit; } skip_optional_pos: @@ -1287,4 +1287,4 @@ bytearray_sizeof(PyByteArrayObject *self, PyObject *Py_UNUSED(ignored)) { return bytearray_sizeof_impl(self); } -/*[clinic end generated code: output=72bfa6cac2fd6832 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=022698e8b0faa272 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/bytesobject.c.h b/Objects/clinic/bytesobject.c.h index 904124ec479abb..060056dafbd84f 100644 --- a/Objects/clinic/bytesobject.c.h +++ b/Objects/clinic/bytesobject.c.h @@ -839,8 +839,8 @@ bytes_splitlines(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, P if (!noptargs) { goto skip_optional_pos; } - keepends = _PyLong_AsInt(args[0]); - if (keepends == -1 && PyErr_Occurred()) { + keepends = PyObject_IsTrue(args[0]); + if (keepends < 0) { goto exit; } skip_optional_pos: @@ -1063,4 +1063,4 @@ bytes_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=5e0a25b7ba749a04 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=31a9e4af85562612 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/listobject.c.h b/Objects/clinic/listobject.c.h index 926eaa5d36983b..94852e99617060 100644 --- a/Objects/clinic/listobject.c.h +++ b/Objects/clinic/listobject.c.h @@ -215,8 +215,8 @@ list_sort(PyListObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject goto skip_optional_kwonly; } } - reverse = _PyLong_AsInt(args[1]); - if (reverse == -1 && PyErr_Occurred()) { + reverse = PyObject_IsTrue(args[1]); + if (reverse < 0) { goto exit; } skip_optional_kwonly: @@ -382,4 +382,4 @@ list___reversed__(PyListObject *self, PyObject *Py_UNUSED(ignored)) { return list___reversed___impl(self); } -/*[clinic end generated code: output=782ed6c68b1c9f83 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=4e6f38b655394564 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/unicodeobject.c.h b/Objects/clinic/unicodeobject.c.h index d803a2733bd636..f640c997577363 100644 --- a/Objects/clinic/unicodeobject.c.h +++ b/Objects/clinic/unicodeobject.c.h @@ -1193,8 +1193,8 @@ unicode_splitlines(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyOb if (!noptargs) { goto skip_optional_pos; } - keepends = _PyLong_AsInt(args[0]); - if (keepends == -1 && PyErr_Occurred()) { + keepends = PyObject_IsTrue(args[0]); + if (keepends < 0) { goto exit; } skip_optional_pos: @@ -1497,4 +1497,4 @@ unicode_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=e775ff4154f1c935 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=05d942840635dadf 
input=a9049054013a1b77]*/ diff --git a/Objects/codeobject.c b/Objects/codeobject.c index 8920b1db2cadb7..e5d86e9d030adb 100644 --- a/Objects/codeobject.c +++ b/Objects/codeobject.c @@ -11,6 +11,72 @@ #include "pycore_tuple.h" // _PyTuple_ITEMS() #include "clinic/codeobject.c.h" +static void +notify_code_watchers(PyCodeEvent event, PyCodeObject *co) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + assert(interp->_initialized); + uint8_t bits = interp->active_code_watchers; + int i = 0; + while (bits) { + assert(i < CODE_MAX_WATCHERS); + if (bits & 1) { + PyCode_WatchCallback cb = interp->code_watchers[i]; + // callback must be non-null if the watcher bit is set + assert(cb != NULL); + if (cb(event, co) < 0) { + PyErr_WriteUnraisable((PyObject *) co); + } + } + i++; + bits >>= 1; + } +} + +int +PyCode_AddWatcher(PyCode_WatchCallback callback) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + assert(interp->_initialized); + + for (int i = 0; i < CODE_MAX_WATCHERS; i++) { + if (!interp->code_watchers[i]) { + interp->code_watchers[i] = callback; + interp->active_code_watchers |= (1 << i); + return i; + } + } + + PyErr_SetString(PyExc_RuntimeError, "no more code watcher IDs available"); + return -1; +} + +static inline int +validate_watcher_id(PyInterpreterState *interp, int watcher_id) +{ + if (watcher_id < 0 || watcher_id >= CODE_MAX_WATCHERS) { + PyErr_Format(PyExc_ValueError, "Invalid code watcher ID %d", watcher_id); + return -1; + } + if (!interp->code_watchers[watcher_id]) { + PyErr_Format(PyExc_ValueError, "No code watcher set for ID %d", watcher_id); + return -1; + } + return 0; +} + +int +PyCode_ClearWatcher(int watcher_id) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + assert(interp->_initialized); + if (validate_watcher_id(interp, watcher_id) < 0) { + return -1; + } + interp->code_watchers[watcher_id] = NULL; + interp->active_code_watchers &= ~(1 << watcher_id); + return 0; +} /****************** * generic helpers @@ -175,8 +241,7 @@ void _Py_set_localsplus_info(int offset, PyObject *name, _PyLocals_Kind kind, PyObject *names, PyObject *kinds) { - Py_INCREF(name); - PyTuple_SET_ITEM(names, offset, name); + PyTuple_SET_ITEM(names, offset, Py_NewRef(name)); _PyLocals_SetKind(kinds, offset, kind); } @@ -235,8 +300,7 @@ get_localsplus_names(PyCodeObject *co, _PyLocals_Kind kind, int num) } assert(index < num); PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, offset); - Py_INCREF(name); - PyTuple_SET_ITEM(names, index, name); + PyTuple_SET_ITEM(names, index, Py_NewRef(name)); index += 1; } assert(index == num); @@ -301,6 +365,8 @@ _PyCode_Validate(struct _PyCodeConstructor *con) return 0; } +extern void _PyCode_Quicken(PyCodeObject *code); + static void init_code(PyCodeObject *co, struct _PyCodeConstructor *con) { @@ -309,27 +375,19 @@ init_code(PyCodeObject *co, struct _PyCodeConstructor *con) get_localsplus_counts(con->localsplusnames, con->localspluskinds, &nlocals, &nplaincellvars, &ncellvars, &nfreevars); - Py_INCREF(con->filename); - co->co_filename = con->filename; - Py_INCREF(con->name); - co->co_name = con->name; - Py_INCREF(con->qualname); - co->co_qualname = con->qualname; + co->co_filename = Py_NewRef(con->filename); + co->co_name = Py_NewRef(con->name); + co->co_qualname = Py_NewRef(con->qualname); co->co_flags = con->flags; co->co_firstlineno = con->firstlineno; - Py_INCREF(con->linetable); - co->co_linetable = con->linetable; + co->co_linetable = Py_NewRef(con->linetable); - Py_INCREF(con->consts); - co->co_consts = 
con->consts; - Py_INCREF(con->names); - co->co_names = con->names; + co->co_consts = Py_NewRef(con->consts); + co->co_names = Py_NewRef(con->names); - Py_INCREF(con->localsplusnames); - co->co_localsplusnames = con->localsplusnames; - Py_INCREF(con->localspluskinds); - co->co_localspluskinds = con->localspluskinds; + co->co_localsplusnames = Py_NewRef(con->localsplusnames); + co->co_localspluskinds = Py_NewRef(con->localspluskinds); co->co_argcount = con->argcount; co->co_posonlyargcount = con->posonlyargcount; @@ -337,23 +395,25 @@ init_code(PyCodeObject *co, struct _PyCodeConstructor *con) co->co_stacksize = con->stacksize; - Py_INCREF(con->exceptiontable); - co->co_exceptiontable = con->exceptiontable; + co->co_exceptiontable = Py_NewRef(con->exceptiontable); /* derived values */ co->co_nlocalsplus = nlocalsplus; co->co_nlocals = nlocals; - co->co_framesize = nlocalsplus + con->stacksize + FRAME_SPECIALS_SIZE; + int nconsts = (int)PyTuple_Size(co->co_consts); + co->co_framesize = nlocalsplus + con->stacksize + nconsts + FRAME_SPECIALS_SIZE; co->co_nplaincellvars = nplaincellvars; co->co_ncellvars = ncellvars; co->co_nfreevars = nfreevars; - + co->co_version = _Py_next_func_version; + if (_Py_next_func_version != 0) { + _Py_next_func_version++; + } /* not set */ co->co_weakreflist = NULL; co->co_extra = NULL; co->_co_cached = NULL; - co->co_warmup = QUICKENING_INITIAL_WARMUP_VALUE; co->_co_linearray_entry_size = 0; co->_co_linearray = NULL; memcpy(_PyCode_CODE(co), PyBytes_AS_STRING(con->code), @@ -361,9 +421,11 @@ init_code(PyCodeObject *co, struct _PyCodeConstructor *con) int entry_point = 0; while (entry_point < Py_SIZE(co) && _Py_OPCODE(_PyCode_CODE(co)[entry_point]) != RESUME) { - entry_point++; + entry_point += OPSIZE; } co->_co_firsttraceable = entry_point; + _PyCode_Quicken(co); + notify_code_watchers(PY_CODE_EVENT_CREATE, co); } static int @@ -656,10 +718,10 @@ PyCode_New(int argcount, int kwonlyargcount, // NOTE: When modifying the construction of PyCode_NewEmpty, please also change // test.test_code.CodeLocationTest.test_code_new_empty to keep it in sync! 
-static const uint8_t assert0[6] = { - RESUME, 0, - LOAD_ASSERTION_ERROR, 0, - RAISE_VARARGS, 1 +static const uint8_t assert0[12] = { + RESUME, 0, 0, 0, + LOAD_ASSERTION_ERROR, 0, 0, 0, + RAISE_VARARGS, 1, 0, 0, }; static const uint8_t linetable[2] = { @@ -691,7 +753,7 @@ PyCode_NewEmpty(const char *filename, const char *funcname, int firstlineno) if (filename_ob == NULL) { goto failed; } - code_ob = PyBytes_FromStringAndSize((const char *)assert0, 6); + code_ob = PyBytes_FromStringAndSize((const char *)assert0, 12); if (code_ob == NULL) { goto failed; } @@ -1142,8 +1204,7 @@ lineiter_next(lineiterator *li) start = PyLong_FromLong(bounds->ar_start); end = PyLong_FromLong(bounds->ar_end); if (bounds->ar_line < 0) { - Py_INCREF(Py_None); - line = Py_None; + line = Py_NewRef(Py_None); } else { line = PyLong_FromLong(bounds->ar_line); @@ -1213,8 +1274,7 @@ new_linesiterator(PyCodeObject *code) if (li == NULL) { return NULL; } - Py_INCREF(code); - li->li_code = code; + li->li_code = (PyCodeObject*)Py_NewRef(code); _PyCode_InitAddressRange(code, &li->li_line); return li; } @@ -1313,8 +1373,7 @@ code_positionsiterator(PyCodeObject* code, PyObject* Py_UNUSED(args)) if (pi == NULL) { return NULL; } - Py_INCREF(code); - pi->pi_code = code; + pi->pi_code = (PyCodeObject*)Py_NewRef(code); _PyCode_InitAddressRange(code, &pi->pi_range); pi->pi_offset = pi->pi_range.ar_end; return (PyObject*)pi; @@ -1469,9 +1528,16 @@ deopt_code(_Py_CODEUNIT *instructions, Py_ssize_t len) _Py_CODEUNIT instruction = instructions[i]; int opcode = _PyOpcode_Deopt[_Py_OPCODE(instruction)]; int caches = _PyOpcode_Caches[opcode]; - instructions[i] = _Py_MAKECODEUNIT(opcode, _Py_OPARG(instruction)); + instructions[i].opcode = opcode; + for (int k = 0; k < OPSIZE - 1; k++) { + /* oparg2, oparg3 */ + instructions[++i].opcode = 0; + instructions[i].oparg = 0; + + } while (caches--) { - instructions[++i] = _Py_MAKECODEUNIT(CACHE, 0); + instructions[++i].opcode = CACHE; + instructions[i].oparg = 0; } } } @@ -1627,6 +1693,8 @@ code_new_impl(PyTypeObject *type, int argcount, int posonlyargcount, static void code_dealloc(PyCodeObject *co) { + notify_code_watchers(PY_CODE_EVENT_DESTROY, co); + if (co->co_extra != NULL) { PyInterpreterState *interp = _PyInterpreterState_GET(); _PyCodeObjectExtra *co_extra = co->co_extra; @@ -1664,9 +1732,6 @@ code_dealloc(PyCodeObject *co) if (co->_co_linearray) { PyMem_Free(co->_co_linearray); } - if (co->co_warmup == 0) { - _Py_QuickenedCount--; - } PyObject_Free(co); } @@ -1725,9 +1790,9 @@ code_richcompare(PyObject *self, PyObject *other, int op) for (int i = 0; i < Py_SIZE(co); i++) { _Py_CODEUNIT co_instr = _PyCode_CODE(co)[i]; _Py_CODEUNIT cp_instr = _PyCode_CODE(cp)[i]; - _Py_SET_OPCODE(co_instr, _PyOpcode_Deopt[_Py_OPCODE(co_instr)]); - _Py_SET_OPCODE(cp_instr, _PyOpcode_Deopt[_Py_OPCODE(cp_instr)]); - eq = co_instr == cp_instr; + co_instr.opcode = _PyOpcode_Deopt[_Py_OPCODE(co_instr)]; + cp_instr.opcode =_PyOpcode_Deopt[_Py_OPCODE(cp_instr)]; + eq = co_instr.cache == cp_instr.cache; if (!eq) { goto unequal; } @@ -1778,8 +1843,7 @@ code_richcompare(PyObject *self, PyObject *other, int op) res = Py_False; done: - Py_INCREF(res); - return res; + return Py_NewRef(res); } static Py_hash_t @@ -1883,15 +1947,13 @@ static PyGetSetDef code_getsetlist[] = { static PyObject * code_sizeof(PyCodeObject *co, PyObject *Py_UNUSED(args)) { - Py_ssize_t res = _PyObject_VAR_SIZE(Py_TYPE(co), Py_SIZE(co)); - + size_t res = _PyObject_VAR_SIZE(Py_TYPE(co), Py_SIZE(co)); _PyCodeObjectExtra *co_extra = 
(_PyCodeObjectExtra*) co->co_extra; if (co_extra != NULL) { - res += sizeof(_PyCodeObjectExtra) + - (co_extra->ce_size-1) * sizeof(co_extra->ce_extras[0]); + res += sizeof(_PyCodeObjectExtra); + res += ((size_t)co_extra->ce_size - 1) * sizeof(co_extra->ce_extras[0]); } - - return PyLong_FromSsize_t(res); + return PyLong_FromSize_t(res); } static PyObject * @@ -2031,8 +2093,7 @@ code__varname_from_oparg_impl(PyCodeObject *self, int oparg) if (name == NULL) { return NULL; } - Py_INCREF(name); - return name; + return Py_NewRef(name); } /* XXX code objects need to participate in GC? */ @@ -2107,8 +2168,7 @@ _PyCode_ConstantKey(PyObject *op) { /* Objects of these types are always different from object of other * type and from tuples. */ - Py_INCREF(op); - key = op; + key = Py_NewRef(op); } else if (PyBool_Check(op) || PyBytes_CheckExact(op)) { /* Make booleans different from integers 0 and 1. @@ -2224,13 +2284,9 @@ _PyCode_ConstantKey(PyObject *op) } void -_PyStaticCode_Dealloc(PyCodeObject *co) +_PyStaticCode_Fini(PyCodeObject *co) { - if (co->co_warmup == 0) { - _Py_QuickenedCount--; - } deopt_code(_PyCode_CODE(co), Py_SIZE(co)); - co->co_warmup = QUICKENING_INITIAL_WARMUP_VALUE; PyMem_Free(co->co_extra); if (co->_co_cached != NULL) { Py_CLEAR(co->_co_cached->_co_code); @@ -2252,7 +2308,7 @@ _PyStaticCode_Dealloc(PyCodeObject *co) } int -_PyStaticCode_InternStrings(PyCodeObject *co) +_PyStaticCode_Init(PyCodeObject *co) { int res = intern_strings(co->co_names); if (res < 0) { @@ -2266,5 +2322,81 @@ _PyStaticCode_InternStrings(PyCodeObject *co) if (res < 0) { return -1; } + _PyCode_Quicken(co); return 0; } + +#define MAX_CODE_UNITS_PER_LOC_ENTRY 8 + +PyCodeObject * +_Py_MakeShimCode(const _PyShimCodeDef *codedef) +{ + PyObject *name = NULL; + PyObject *co_code = NULL; + PyObject *lines = NULL; + PyCodeObject *codeobj = NULL; + uint8_t *loc_table = NULL; + + name = _PyUnicode_FromASCII(codedef->cname, strlen(codedef->cname)); + if (name == NULL) { + goto cleanup; + } + co_code = PyBytes_FromStringAndSize( + (const char *)codedef->code, codedef->codelen); + if (co_code == NULL) { + goto cleanup; + } + int code_units = codedef->codelen / sizeof(_Py_CODEUNIT); + int loc_entries = (code_units + MAX_CODE_UNITS_PER_LOC_ENTRY - 1) / + MAX_CODE_UNITS_PER_LOC_ENTRY; + loc_table = PyMem_Malloc(loc_entries); + if (loc_table == NULL) { + PyErr_NoMemory(); + goto cleanup; + } + for (int i = 0; i < loc_entries-1; i++) { + loc_table[i] = 0x80 | (PY_CODE_LOCATION_INFO_NONE << 3) | 7; + code_units -= MAX_CODE_UNITS_PER_LOC_ENTRY; + } + assert(loc_entries > 0); + assert(code_units > 0 && code_units <= MAX_CODE_UNITS_PER_LOC_ENTRY); + loc_table[loc_entries-1] = 0x80 | + (PY_CODE_LOCATION_INFO_NONE << 3) | (code_units-1); + lines = PyBytes_FromStringAndSize((const char *)loc_table, loc_entries); + PyMem_Free(loc_table); + if (lines == NULL) { + goto cleanup; + } + _Py_DECLARE_STR(shim_name, "<shim>"); + struct _PyCodeConstructor con = { + .filename = &_Py_STR(shim_name), + .name = name, + .qualname = name, + .flags = CO_NEWLOCALS | CO_OPTIMIZED, + + .code = co_code, + .firstlineno = 1, + .linetable = lines, + + .consts = (PyObject *)&_Py_SINGLETON(tuple_empty), + .names = (PyObject *)&_Py_SINGLETON(tuple_empty), + + .localsplusnames = (PyObject *)&_Py_SINGLETON(tuple_empty), + .localspluskinds = (PyObject *)&_Py_SINGLETON(bytes_empty), + + .argcount = 0, + .posonlyargcount = 0, + .kwonlyargcount = 0, + + .stacksize = codedef->stacksize, + + .exceptiontable = (PyObject *)&_Py_SINGLETON(bytes_empty), + }; + + 
codeobj = _PyCode_New(&con); +cleanup: + Py_XDECREF(name); + Py_XDECREF(co_code); + Py_XDECREF(lines); + return codeobj; +} diff --git a/Objects/complexobject.c b/Objects/complexobject.c index 9bd68d50c30ae0..aee03ddfb07aec 100644 --- a/Objects/complexobject.c +++ b/Objects/complexobject.c @@ -449,8 +449,7 @@ to_complex(PyObject **pobj, Py_complex *pc) pc->real = PyFloat_AsDouble(obj); return 0; } - Py_INCREF(Py_NotImplemented); - *pobj = Py_NotImplemented; + *pobj = Py_NewRef(Py_NotImplemented); return -1; } @@ -553,8 +552,7 @@ static PyObject * complex_pos(PyComplexObject *v) { if (PyComplex_CheckExact(v)) { - Py_INCREF(v); - return (PyObject *)v; + return Py_NewRef(v); } else return PyComplex_FromCComplex(v->cval); @@ -631,8 +629,7 @@ complex_richcompare(PyObject *v, PyObject *w, int op) else res = Py_False; - Py_INCREF(res); - return res; + return Py_NewRef(res); Unimplemented: Py_RETURN_NOTIMPLEMENTED; @@ -705,8 +702,7 @@ complex___complex___impl(PyComplexObject *self) /*[clinic end generated code: output=e6b35ba3d275dc9c input=3589ada9d27db854]*/ { if (PyComplex_CheckExact(self)) { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } else { return PyComplex_FromCComplex(self->cval); @@ -917,8 +913,7 @@ complex_new_impl(PyTypeObject *type, PyObject *r, PyObject *i) to exact complexes here. If either the input or the output is a complex subclass, it will be handled below as a non-orthogonal vector. */ - Py_INCREF(r); - return r; + return Py_NewRef(r); } if (PyUnicode_Check(r)) { if (i != NULL) { diff --git a/Objects/descrobject.c b/Objects/descrobject.c index a2974f91aaaec3..c545b90c6283e1 100644 --- a/Objects/descrobject.c +++ b/Objects/descrobject.c @@ -17,7 +17,7 @@ class property "propertyobject *" "&PyProperty_Type" // see pycore_object.h #if defined(__EMSCRIPTEN__) && defined(PY_CALL_TRAMPOLINE) #include <emscripten.h> -EM_JS(PyObject*, descr_set_trampoline_call, (setter set, PyObject *obj, PyObject *value, void *closure), { +EM_JS(int, descr_set_trampoline_call, (setter set, PyObject *obj, PyObject *value, void *closure), { return wasmTable.get(set)(obj, value, closure); }); @@ -622,8 +622,7 @@ descr_get_qualname(PyDescrObject *descr, void *Py_UNUSED(ignored)) { if (descr->d_qualname == NULL) descr->d_qualname = calculate_qualname(descr); - Py_XINCREF(descr->d_qualname); - return descr->d_qualname; + return Py_XNewRef(descr->d_qualname); } static PyObject * @@ -904,12 +903,10 @@ descr_new(PyTypeObject *descrtype, PyTypeObject *type, const char *name) descr = (PyDescrObject *)PyType_GenericAlloc(descrtype, 0); if (descr != NULL) { - Py_XINCREF(type); - descr->d_type = type; + descr->d_type = (PyTypeObject*)Py_XNewRef(type); descr->d_name = PyUnicode_InternFromString(name); if (descr->d_name == NULL) { - Py_DECREF(descr); - descr = NULL; + Py_SETREF(descr, NULL); } else { descr->d_qualname = NULL; @@ -1244,8 +1241,7 @@ mappingproxy_new_impl(PyTypeObject *type, PyObject *mapping) mappingproxy = PyObject_GC_New(mappingproxyobject, &PyDictProxy_Type); if (mappingproxy == NULL) return NULL; - Py_INCREF(mapping); - mappingproxy->mapping = mapping; + mappingproxy->mapping = Py_NewRef(mapping); _PyObject_GC_TRACK(mappingproxy); return (PyObject *)mappingproxy; } @@ -1260,8 +1256,7 @@ PyDictProxy_New(PyObject *mapping) pp = PyObject_GC_New(mappingproxyobject, &PyDictProxy_Type); if (pp != NULL) { - Py_INCREF(mapping); - pp->mapping = mapping; + pp->mapping = Py_NewRef(mapping); _PyObject_GC_TRACK(pp); } return (PyObject *)pp; @@ -1361,8 +1356,7 @@ 
wrapper_objclass(wrapperobject *wp, void *Py_UNUSED(ignored)) { PyObject *c = (PyObject *)PyDescr_TYPE(wp->descr); - Py_INCREF(c); - return c; + return Py_NewRef(c); } static PyObject * @@ -1466,10 +1460,8 @@ PyWrapper_New(PyObject *d, PyObject *self) wp = PyObject_GC_New(wrapperobject, &_PyMethodWrapper_Type); if (wp != NULL) { - Py_INCREF(descr); - wp->descr = descr; - Py_INCREF(self); - wp->self = self; + wp->descr = (PyWrapperDescrObject*)Py_NewRef(descr); + wp->self = Py_NewRef(self); _PyObject_GC_TRACK(wp); } return (PyObject *)wp; @@ -1566,8 +1558,7 @@ property_set_name(PyObject *self, PyObject *args) { propertyobject *prop = (propertyobject *)self; PyObject *name = PyTuple_GET_ITEM(args, 1); - Py_XINCREF(name); - Py_XSETREF(prop->prop_name, name); + Py_XSETREF(prop->prop_name, Py_XNewRef(name)); Py_RETURN_NONE; } @@ -1599,8 +1590,7 @@ static PyObject * property_descr_get(PyObject *self, PyObject *obj, PyObject *type) { if (obj == NULL || obj == Py_None) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } propertyobject *gs = (propertyobject *)self; @@ -1722,8 +1712,7 @@ property_copy(PyObject *old, PyObject *get, PyObject *set, PyObject *del) if (new == NULL) return NULL; - Py_XINCREF(pold->prop_name); - Py_XSETREF(((propertyobject *) new)->prop_name, pold->prop_name); + Py_XSETREF(((propertyobject *) new)->prop_name, Py_XNewRef(pold->prop_name)); return new; } @@ -1776,13 +1765,9 @@ property_init_impl(propertyobject *self, PyObject *fget, PyObject *fset, if (fdel == Py_None) fdel = NULL; - Py_XINCREF(fget); - Py_XINCREF(fset); - Py_XINCREF(fdel); - - Py_XSETREF(self->prop_get, fget); - Py_XSETREF(self->prop_set, fset); - Py_XSETREF(self->prop_del, fdel); + Py_XSETREF(self->prop_get, Py_XNewRef(fget)); + Py_XSETREF(self->prop_set, Py_XNewRef(fset)); + Py_XSETREF(self->prop_del, Py_XNewRef(fdel)); Py_XSETREF(self->prop_doc, NULL); Py_XSETREF(self->prop_name, NULL); @@ -1790,8 +1775,7 @@ property_init_impl(propertyobject *self, PyObject *fget, PyObject *fset, PyObject *prop_doc = NULL; if (doc != NULL && doc != Py_None) { - prop_doc = doc; - Py_XINCREF(prop_doc); + prop_doc = Py_XNewRef(doc); } /* if no docstring given and the getter has one, use that one */ else if (fget != NULL) { @@ -1820,8 +1804,7 @@ property_init_impl(propertyobject *self, PyObject *fget, PyObject *fset, class's dict. */ if (prop_doc == NULL) { - prop_doc = Py_None; - Py_INCREF(prop_doc); + prop_doc = Py_NewRef(Py_None); } int err = PyObject_SetAttr( (PyObject *)self, &_Py_ID(__doc__), prop_doc); diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 97007479b1be91..b9067213820b52 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -236,11 +236,6 @@ static int dictresize(PyDictObject *mp, uint8_t log_newsize, int unicode); static PyObject* dict_iter(PyDictObject *dict); -/*Global counter used to set ma_version_tag field of dictionary. - * It is incremented each time that a dictionary is created and each - * time that a dictionary is modified. 
*/ -uint64_t _pydict_global_version = 0; - #include "clinic/dictobject.c.h" @@ -690,9 +685,9 @@ free_keys_object(PyDictKeysObject *keys) } static inline PyDictValues* -new_values(Py_ssize_t size) +new_values(size_t size) { - assert(size > 0); + assert(size >= 1); size_t prefix_size = _Py_SIZE_ROUND_UP(size+2, sizeof(PyObject *)); assert(prefix_size < 256); size_t n = prefix_size + size * sizeof(PyObject *); @@ -751,27 +746,24 @@ new_dict(PyDictKeysObject *keys, PyDictValues *values, Py_ssize_t used, int free return (PyObject *)mp; } -static inline Py_ssize_t +static inline size_t shared_keys_usable_size(PyDictKeysObject *keys) { - return keys->dk_nentries + keys->dk_usable; + return (size_t)keys->dk_nentries + (size_t)keys->dk_usable; } /* Consumes a reference to the keys object */ static PyObject * new_dict_with_shared_keys(PyDictKeysObject *keys) { - PyDictValues *values; - Py_ssize_t i, size; - - size = shared_keys_usable_size(keys); - values = new_values(size); + size_t size = shared_keys_usable_size(keys); + PyDictValues *values = new_values(size); if (values == NULL) { dictkeys_decref(keys); return PyErr_NoMemory(); } ((char *)values)[-2] = 0; - for (i = 0; i < size; i++) { + for (size_t i = 0; i < size; i++) { values->values[i] = NULL; } return new_dict(keys, values, 0, 1); @@ -786,7 +778,7 @@ clone_combined_dict_keys(PyDictObject *orig) assert(orig->ma_values == NULL); assert(orig->ma_keys->dk_refcnt == 1); - Py_ssize_t keys_size = _PyDict_KeysSize(orig->ma_keys); + size_t keys_size = _PyDict_KeysSize(orig->ma_keys); PyDictKeysObject *keys = PyObject_Malloc(keys_size); if (keys == NULL) { PyErr_NoMemory(); @@ -1200,7 +1192,6 @@ insert_into_dictkeys(PyDictKeysObject *keys, PyObject *name) if (keys->dk_usable <= 0) { return DKIX_EMPTY; } - Py_INCREF(name); /* Insert into new slot. 
*/ keys->dk_version = 0; Py_ssize_t hashpos = find_empty_slot(keys, hash); @@ -1208,7 +1199,7 @@ insert_into_dictkeys(PyDictKeysObject *keys, PyObject *name) PyDictUnicodeEntry *ep = &DK_UNICODE_ENTRIES(keys)[ix]; dictkeys_set_index(keys, hashpos, ix); assert(ep->me_key == NULL); - ep->me_key = name; + ep->me_key = Py_NewRef(name); keys->dk_usable--; keys->dk_nentries++; } @@ -1453,8 +1444,7 @@ dictresize(PyDictObject *mp, uint8_t log2_newsize, int unicode) int index = get_index_from_order(mp, i); PyDictUnicodeEntry *ep = &oldentries[index]; assert(oldvalues->values[index] != NULL); - Py_INCREF(ep->me_key); - newentries[i].me_key = ep->me_key; + newentries[i].me_key = Py_NewRef(ep->me_key); newentries[i].me_hash = unicode_get_hash(ep->me_key); newentries[i].me_value = oldvalues->values[index]; } @@ -1467,8 +1457,7 @@ dictresize(PyDictObject *mp, uint8_t log2_newsize, int unicode) int index = get_index_from_order(mp, i); PyDictUnicodeEntry *ep = &oldentries[index]; assert(oldvalues->values[index] != NULL); - Py_INCREF(ep->me_key); - newentries[i].me_key = ep->me_key; + newentries[i].me_key = Py_NewRef(ep->me_key); newentries[i].me_value = oldvalues->values[index]; } build_indices_unicode(mp->ma_keys, newentries, numentries); @@ -1867,9 +1856,8 @@ PyDict_SetItem(PyObject *op, PyObject *key, PyObject *value) } assert(key); assert(value); - Py_INCREF(key); - Py_INCREF(value); - return _PyDict_SetItem_Take2((PyDictObject *)op, key, value); + return _PyDict_SetItem_Take2((PyDictObject *)op, + Py_NewRef(key), Py_NewRef(value)); } int @@ -1887,13 +1875,11 @@ _PyDict_SetItem_KnownHash(PyObject *op, PyObject *key, PyObject *value, assert(hash != -1); mp = (PyDictObject *)op; - Py_INCREF(key); - Py_INCREF(value); if (mp->ma_keys == Py_EMPTY_KEYS) { - return insert_to_emptydict(mp, key, hash, value); + return insert_to_emptydict(mp, Py_NewRef(key), hash, Py_NewRef(value)); } /* insertdict() handles any resizing that might be necessary */ - return insertdict(mp, key, hash, value); + return insertdict(mp, Py_NewRef(key), hash, Py_NewRef(value)); } static void @@ -2185,8 +2171,7 @@ _PyDict_Pop_KnownHash(PyObject *dict, PyObject *key, Py_hash_t hash, PyObject *d if (mp->ma_used == 0) { if (deflt) { - Py_INCREF(deflt); - return deflt; + return Py_NewRef(deflt); } _PyErr_SetKeyError(key); return NULL; @@ -2196,16 +2181,14 @@ _PyDict_Pop_KnownHash(PyObject *dict, PyObject *key, Py_hash_t hash, PyObject *d return NULL; if (ix == DKIX_EMPTY || old_value == NULL) { if (deflt) { - Py_INCREF(deflt); - return deflt; + return Py_NewRef(deflt); } _PyErr_SetKeyError(key); return NULL; } assert(old_value != NULL); - Py_INCREF(old_value); uint64_t new_version = _PyDict_NotifyEvent(PyDict_EVENT_DELETED, mp, key, NULL); - delitem_common(mp, hash, ix, old_value, new_version); + delitem_common(mp, hash, ix, Py_NewRef(old_value), new_version); ASSERT_CONSISTENT(mp); return old_value; @@ -2218,8 +2201,7 @@ _PyDict_Pop(PyObject *dict, PyObject *key, PyObject *deflt) if (((PyDictObject *)dict)->ma_used == 0) { if (deflt) { - Py_INCREF(deflt); - return deflt; + return Py_NewRef(deflt); } _PyErr_SetKeyError(key); return NULL; @@ -2260,9 +2242,7 @@ _PyDict_FromKeys(PyObject *cls, PyObject *iterable, PyObject *value) } while (_PyDict_Next(iterable, &pos, &key, &oldvalue, &hash)) { - Py_INCREF(key); - Py_INCREF(value); - if (insertdict(mp, key, hash, value)) { + if (insertdict(mp, Py_NewRef(key), hash, Py_NewRef(value))) { Py_DECREF(d); return NULL; } @@ -2281,9 +2261,7 @@ _PyDict_FromKeys(PyObject *cls, PyObject *iterable, 
PyObject *value) } while (_PySet_NextEntry(iterable, &pos, &key, &hash)) { - Py_INCREF(key); - Py_INCREF(value); - if (insertdict(mp, key, hash, value)) { + if (insertdict(mp, Py_NewRef(key), hash, Py_NewRef(value))) { Py_DECREF(d); return NULL; } @@ -2489,8 +2467,7 @@ dict_subscript(PyDictObject *mp, PyObject *key) _PyErr_SetKeyError(key); return NULL; } - Py_INCREF(value); - return value; + return Py_NewRef(value); } static int @@ -2532,8 +2509,7 @@ dict_keys(PyDictObject *mp) PyObject *key; while (_PyDict_Next((PyObject*)mp, &pos, &key, NULL, NULL)) { assert(j < n); - Py_INCREF(key); - PyList_SET_ITEM(v, j, key); + PyList_SET_ITEM(v, j, Py_NewRef(key)); j++; } assert(j == n); @@ -2564,8 +2540,7 @@ dict_values(PyDictObject *mp) PyObject *value; while (_PyDict_Next((PyObject*)mp, &pos, NULL, &value, NULL)) { assert(j < n); - Py_INCREF(value); - PyList_SET_ITEM(v, j, value); + PyList_SET_ITEM(v, j, Py_NewRef(value)); j++; } assert(j == n); @@ -2610,10 +2585,8 @@ dict_items(PyDictObject *mp) while (_PyDict_Next((PyObject*)mp, &pos, &key, &value, NULL)) { assert(j < n); PyObject *item = PyList_GET_ITEM(v, j); - Py_INCREF(key); - PyTuple_SET_ITEM(item, 0, key); - Py_INCREF(value); - PyTuple_SET_ITEM(item, 1, value); + PyTuple_SET_ITEM(item, 0, Py_NewRef(key)); + PyTuple_SET_ITEM(item, 1, Py_NewRef(value)); j++; } assert(j == n); @@ -2865,16 +2838,12 @@ dict_merge(PyObject *a, PyObject *b, int override) Py_INCREF(key); Py_INCREF(value); if (override == 1) { - Py_INCREF(key); - Py_INCREF(value); - err = insertdict(mp, key, hash, value); + err = insertdict(mp, Py_NewRef(key), hash, Py_NewRef(value)); } else { err = _PyDict_Contains_KnownHash(a, key, hash); if (err == 0) { - Py_INCREF(key); - Py_INCREF(value); - err = insertdict(mp, key, hash, value); + err = insertdict(mp, Py_NewRef(key), hash, Py_NewRef(value)); } else if (err > 0) { if (override != 0) { @@ -2987,7 +2956,6 @@ PyDict_Copy(PyObject *o) { PyObject *copy; PyDictObject *mp; - Py_ssize_t i, n; if (o == NULL || !PyDict_Check(o)) { PyErr_BadInternalCall(); @@ -3002,9 +2970,8 @@ PyDict_Copy(PyObject *o) if (_PyDict_HasSplitTable(mp)) { PyDictObject *split_copy; - Py_ssize_t size = shared_keys_usable_size(mp->ma_keys); - PyDictValues *newvalues; - newvalues = new_values(size); + size_t size = shared_keys_usable_size(mp->ma_keys); + PyDictValues *newvalues = new_values(size); if (newvalues == NULL) return PyErr_NoMemory(); split_copy = PyObject_GC_New(PyDictObject, &PyDict_Type); @@ -3019,10 +2986,9 @@ PyDict_Copy(PyObject *o) split_copy->ma_used = mp->ma_used; split_copy->ma_version_tag = DICT_NEXT_VERSION(); dictkeys_incref(mp->ma_keys); - for (i = 0, n = size; i < n; i++) { + for (size_t i = 0; i < size; i++) { PyObject *value = mp->ma_values->values[i]; - Py_XINCREF(value); - split_copy->ma_values->values[i] = value; + split_copy->ma_values->values[i] = Py_XNewRef(value); } if (_PyObject_GC_IS_TRACKED(mp)) _PyObject_GC_TRACK(split_copy); @@ -3197,8 +3163,7 @@ dict_richcompare(PyObject *v, PyObject *w, int op) } else res = Py_NotImplemented; - Py_INCREF(res); - return res; + return Py_NewRef(res); } /*[clinic input] @@ -3263,8 +3228,7 @@ dict_get_impl(PyDictObject *self, PyObject *key, PyObject *default_value) if (ix == DKIX_EMPTY || val == NULL) { val = default_value; } - Py_INCREF(val); - return val; + return Py_NewRef(val); } PyObject * @@ -3286,9 +3250,8 @@ PyDict_SetDefault(PyObject *d, PyObject *key, PyObject *defaultobj) } if (mp->ma_keys == Py_EMPTY_KEYS) { - Py_INCREF(key); - Py_INCREF(defaultobj); - if 
(insert_to_emptydict(mp, key, hash, defaultobj) < 0) { + if (insert_to_emptydict(mp, Py_NewRef(key), hash, + Py_NewRef(defaultobj)) < 0) { return NULL; } return defaultobj; @@ -3318,26 +3281,24 @@ PyDict_SetDefault(PyObject *d, PyObject *key, PyObject *defaultobj) if (DK_IS_UNICODE(mp->ma_keys)) { assert(PyUnicode_CheckExact(key)); PyDictUnicodeEntry *ep = &DK_UNICODE_ENTRIES(mp->ma_keys)[mp->ma_keys->dk_nentries]; - ep->me_key = key; + ep->me_key = Py_NewRef(key); if (_PyDict_HasSplitTable(mp)) { Py_ssize_t index = (int)mp->ma_keys->dk_nentries; assert(index < SHARED_KEYS_MAX_SIZE); assert(mp->ma_values->values[index] == NULL); - mp->ma_values->values[index] = value; + mp->ma_values->values[index] = Py_NewRef(value); _PyDictValues_AddToInsertionOrder(mp->ma_values, index); } else { - ep->me_value = value; + ep->me_value = Py_NewRef(value); } } else { PyDictKeyEntry *ep = &DK_ENTRIES(mp->ma_keys)[mp->ma_keys->dk_nentries]; - ep->me_key = key; + ep->me_key = Py_NewRef(key); ep->me_hash = hash; - ep->me_value = value; + ep->me_value = Py_NewRef(value); } - Py_INCREF(key); - Py_INCREF(value); MAINTAIN_TRACKING(mp, key, value); mp->ma_used++; mp->ma_version_tag = new_version; @@ -3350,9 +3311,8 @@ PyDict_SetDefault(PyObject *d, PyObject *key, PyObject *defaultobj) value = defaultobj; assert(_PyDict_HasSplitTable(mp)); assert(mp->ma_values->values[ix] == NULL); - Py_INCREF(value); MAINTAIN_TRACKING(mp, key, value); - mp->ma_values->values[ix] = value; + mp->ma_values->values[ix] = Py_NewRef(value); _PyDictValues_AddToInsertionOrder(mp->ma_values, ix); mp->ma_used++; mp->ma_version_tag = new_version; @@ -3382,8 +3342,7 @@ dict_setdefault_impl(PyDictObject *self, PyObject *key, PyObject *val; val = PyDict_SetDefault((PyObject *)self, key, default_value); - Py_XINCREF(val); - return val; + return Py_XNewRef(val); } static PyObject * @@ -3550,9 +3509,7 @@ static PyObject *dictiter_new(PyDictObject *, PyTypeObject *); Py_ssize_t _PyDict_SizeOf(PyDictObject *mp) { - Py_ssize_t res; - - res = _PyObject_SIZE(Py_TYPE(mp)); + size_t res = _PyObject_SIZE(Py_TYPE(mp)); if (mp->ma_values) { res += shared_keys_usable_size(mp->ma_keys) * sizeof(PyObject*); } @@ -3561,17 +3518,19 @@ _PyDict_SizeOf(PyDictObject *mp) if (mp->ma_keys->dk_refcnt == 1) { res += _PyDict_KeysSize(mp->ma_keys); } - return res; + assert(res <= (size_t)PY_SSIZE_T_MAX); + return (Py_ssize_t)res; } -Py_ssize_t +size_t _PyDict_KeysSize(PyDictKeysObject *keys) { - size_t es = keys->dk_kind == DICT_KEYS_GENERAL - ? sizeof(PyDictKeyEntry) : sizeof(PyDictUnicodeEntry); - return (sizeof(PyDictKeysObject) - + ((size_t)1 << keys->dk_log2_index_bytes) - + USABLE_FRACTION(DK_SIZE(keys)) * es); + size_t es = (keys->dk_kind == DICT_KEYS_GENERAL + ? 
sizeof(PyDictKeyEntry) : sizeof(PyDictUnicodeEntry)); + size_t size = sizeof(PyDictKeysObject); + size += (size_t)1 << keys->dk_log2_index_bytes; + size += USABLE_FRACTION((size_t)DK_SIZE(keys)) * es; + return size; } static PyObject * @@ -3603,8 +3562,7 @@ dict_ior(PyObject *self, PyObject *other) if (dict_update_arg(self, other)) { return NULL; } - Py_INCREF(self); - return self; + return Py_NewRef(self); } PyDoc_STRVAR(getitem__doc__, @@ -3942,8 +3900,7 @@ dictiter_new(PyDictObject *dict, PyTypeObject *itertype) if (di == NULL) { return NULL; } - Py_INCREF(dict); - di->di_dict = dict; + di->di_dict = (PyDictObject*)Py_NewRef(dict); di->di_used = dict->ma_used; di->len = dict->ma_used; if (itertype == &PyDictRevIterKey_Type || @@ -4077,8 +4034,7 @@ dictiter_iternextkey(dictiterobject *di) } di->di_pos = i+1; di->len--; - Py_INCREF(key); - return key; + return Py_NewRef(key); fail: di->di_dict = NULL; @@ -4177,8 +4133,7 @@ dictiter_iternextvalue(dictiterobject *di) } di->di_pos = i+1; di->len--; - Py_INCREF(value); - return value; + return Py_NewRef(value); fail: di->di_dict = NULL; @@ -4280,14 +4235,12 @@ dictiter_iternextitem(dictiterobject *di) } di->di_pos = i+1; di->len--; - Py_INCREF(key); - Py_INCREF(value); result = di->di_result; if (Py_REFCNT(result) == 1) { PyObject *oldkey = PyTuple_GET_ITEM(result, 0); PyObject *oldvalue = PyTuple_GET_ITEM(result, 1); - PyTuple_SET_ITEM(result, 0, key); /* steals reference */ - PyTuple_SET_ITEM(result, 1, value); /* steals reference */ + PyTuple_SET_ITEM(result, 0, Py_NewRef(key)); + PyTuple_SET_ITEM(result, 1, Py_NewRef(value)); Py_INCREF(result); Py_DECREF(oldkey); Py_DECREF(oldvalue); @@ -4301,8 +4254,8 @@ dictiter_iternextitem(dictiterobject *di) result = PyTuple_New(2); if (result == NULL) return NULL; - PyTuple_SET_ITEM(result, 0, key); /* steals reference */ - PyTuple_SET_ITEM(result, 1, value); /* steals reference */ + PyTuple_SET_ITEM(result, 0, Py_NewRef(key)); + PyTuple_SET_ITEM(result, 1, Py_NewRef(value)); } return result; @@ -4406,22 +4359,18 @@ dictreviter_iternext(dictiterobject *di) di->len--; if (Py_IS_TYPE(di, &PyDictRevIterKey_Type)) { - Py_INCREF(key); - return key; + return Py_NewRef(key); } else if (Py_IS_TYPE(di, &PyDictRevIterValue_Type)) { - Py_INCREF(value); - return value; + return Py_NewRef(value); } else if (Py_IS_TYPE(di, &PyDictRevIterItem_Type)) { - Py_INCREF(key); - Py_INCREF(value); result = di->di_result; if (Py_REFCNT(result) == 1) { PyObject *oldkey = PyTuple_GET_ITEM(result, 0); PyObject *oldvalue = PyTuple_GET_ITEM(result, 1); - PyTuple_SET_ITEM(result, 0, key); /* steals reference */ - PyTuple_SET_ITEM(result, 1, value); /* steals reference */ + PyTuple_SET_ITEM(result, 0, Py_NewRef(key)); + PyTuple_SET_ITEM(result, 1, Py_NewRef(value)); Py_INCREF(result); Py_DECREF(oldkey); Py_DECREF(oldvalue); @@ -4436,8 +4385,8 @@ dictreviter_iternext(dictiterobject *di) if (result == NULL) { return NULL; } - PyTuple_SET_ITEM(result, 0, key); /* steals reference */ - PyTuple_SET_ITEM(result, 1, value); /* steals reference */ + PyTuple_SET_ITEM(result, 0, Py_NewRef(key)); + PyTuple_SET_ITEM(result, 1, Py_NewRef(value)); } return result; } @@ -4484,7 +4433,6 @@ dictiter_reduce(dictiterobject *di, PyObject *Py_UNUSED(ignored)) /* copy the iterator state */ dictiterobject tmp = *di; Py_XINCREF(tmp.di_dict); - PyObject *list = PySequence_List((PyObject*)&tmp); Py_XDECREF(tmp.di_dict); if (list == NULL) { @@ -4566,8 +4514,7 @@ _PyDictView_New(PyObject *dict, PyTypeObject *type) dv = PyObject_GC_New(_PyDictViewObject, 
type); if (dv == NULL) return NULL; - Py_INCREF(dict); - dv->dv_dict = (PyDictObject *)dict; + dv->dv_dict = (PyDictObject *)Py_NewRef(dict); _PyObject_GC_TRACK(dv); return (PyObject *)dv; } @@ -4678,8 +4625,7 @@ dictview_richcompare(PyObject *self, PyObject *other, int op) if (ok < 0) return NULL; result = ok ? Py_True : Py_False; - Py_INCREF(result); - return result; + return Py_NewRef(result); } static PyObject * @@ -5335,16 +5281,15 @@ init_inline_values(PyObject *obj, PyTypeObject *tp) if (keys->dk_usable > 1) { keys->dk_usable--; } - Py_ssize_t size = shared_keys_usable_size(keys); - assert(size > 0); + size_t size = shared_keys_usable_size(keys); PyDictValues *values = new_values(size); if (values == NULL) { PyErr_NoMemory(); return -1; } - assert(((uint8_t *)values)[-1] >= size+2); + assert(((uint8_t *)values)[-1] >= (size + 2)); ((uint8_t *)values)[-2] = 0; - for (int i = 0; i < size; i++) { + for (size_t i = 0; i < size; i++) { values->values[i] = NULL; } _PyDictOrValues_SetValues(_PyObject_DictOrValuesPointer(obj), values); @@ -5384,7 +5329,8 @@ make_dict_from_instance_attributes(PyDictKeysObject *keys, PyDictValues *values) dictkeys_incref(keys); Py_ssize_t used = 0; Py_ssize_t track = 0; - for (Py_ssize_t i = 0; i < shared_keys_usable_size(keys); i++) { + size_t size = shared_keys_usable_size(keys); + for (size_t i = 0; i < size; i++) { PyObject *val = values->values[i]; if (val != NULL) { used += 1; @@ -5445,8 +5391,7 @@ _PyObject_StoreInstanceAttribute(PyObject *obj, PyDictValues *values, } } PyObject *old_value = values->values[ix]; - Py_XINCREF(value); - values->values[ix] = value; + values->values[ix] = Py_XNewRef(value); if (old_value == NULL) { if (value == NULL) { PyErr_Format(PyExc_AttributeError, @@ -5508,8 +5453,7 @@ _PyObject_GetInstanceAttribute(PyObject *obj, PyDictValues *values, return NULL; } PyObject *value = values->values[ix]; - Py_XINCREF(value); - return value; + return Py_XNewRef(value); } int @@ -5652,8 +5596,7 @@ PyObject_GenericGetDict(PyObject *obj, void *context) } } } - Py_XINCREF(dict); - return dict; + return Py_XNewRef(dict); } int @@ -5705,17 +5648,15 @@ _PyDictKeys_DecRef(PyDictKeysObject *keys) dictkeys_decref(keys); } -static uint32_t next_dict_keys_version = 2; - uint32_t _PyDictKeys_GetVersionForCurrentState(PyDictKeysObject *dictkeys) { if (dictkeys->dk_version != 0) { return dictkeys->dk_version; } - if (next_dict_keys_version == 0) { + if (_PyRuntime.dict_state.next_keys_version == 0) { return 0; } - uint32_t v = next_dict_keys_version++; + uint32_t v = _PyRuntime.dict_state.next_keys_version++; dictkeys->dk_version = v; return v; } @@ -5727,7 +5668,7 @@ validate_watcher_id(PyInterpreterState *interp, int watcher_id) PyErr_Format(PyExc_ValueError, "Invalid dict watcher ID %d", watcher_id); return -1; } - if (!interp->dict_watchers[watcher_id]) { + if (!interp->dict_state.watchers[watcher_id]) { PyErr_Format(PyExc_ValueError, "No dict watcher set for ID %d", watcher_id); return -1; } @@ -5770,8 +5711,8 @@ PyDict_AddWatcher(PyDict_WatchCallback callback) PyInterpreterState *interp = _PyInterpreterState_GET(); for (int i = 0; i < DICT_MAX_WATCHERS; i++) { - if (!interp->dict_watchers[i]) { - interp->dict_watchers[i] = callback; + if (!interp->dict_state.watchers[i]) { + interp->dict_state.watchers[i] = callback; return i; } } @@ -5787,7 +5728,7 @@ PyDict_ClearWatcher(int watcher_id) if (validate_watcher_id(interp, watcher_id)) { return -1; } - interp->dict_watchers[watcher_id] = NULL; + interp->dict_state.watchers[watcher_id] = NULL; 
return 0; } @@ -5801,7 +5742,7 @@ _PyDict_SendEvent(int watcher_bits, PyInterpreterState *interp = _PyInterpreterState_GET(); for (int i = 0; i < DICT_MAX_WATCHERS; i++) { if (watcher_bits & 1) { - PyDict_WatchCallback cb = interp->dict_watchers[i]; + PyDict_WatchCallback cb = interp->dict_state.watchers[i]; if (cb && (cb(event, (PyObject*)mp, key, value) < 0)) { // some dict modification paths (e.g. PyDict_Clear) can't raise, so we // can't propagate exceptions from dict watchers. diff --git a/Objects/enumobject.c b/Objects/enumobject.c index d84bac6f4c9af2..c9d90584c26b7d 100644 --- a/Objects/enumobject.c +++ b/Objects/enumobject.c @@ -389,8 +389,7 @@ reversed_new_impl(PyTypeObject *type, PyObject *seq) return NULL; ro->index = n-1; - Py_INCREF(seq); - ro->seq = seq; + ro->seq = Py_NewRef(seq); return (PyObject *)ro; } diff --git a/Objects/exceptions.c b/Objects/exceptions.c index 4b4f31a209b669..db6f7d52804d6a 100644 --- a/Objects/exceptions.c +++ b/Objects/exceptions.c @@ -53,8 +53,7 @@ BaseException_new(PyTypeObject *type, PyObject *args, PyObject *kwds) self->suppress_context = 0; if (args) { - self->args = args; - Py_INCREF(args); + self->args = Py_NewRef(args); return (PyObject *)self; } @@ -73,9 +72,7 @@ BaseException_init(PyBaseExceptionObject *self, PyObject *args, PyObject *kwds) if (!_PyArg_NoKeywords(Py_TYPE(self)->tp_name, kwds)) return -1; - Py_INCREF(args); - Py_XSETREF(self->args, args); - + Py_XSETREF(self->args, Py_NewRef(args)); return 0; } @@ -185,8 +182,7 @@ BaseException_with_traceback(PyObject *self, PyObject *tb) { if (PyException_SetTraceback(self, tb)) return NULL; - Py_INCREF(self); - return self; + return Py_NewRef(self); } PyDoc_STRVAR(with_traceback_doc, @@ -258,8 +254,7 @@ BaseException_get_args(PyBaseExceptionObject *self, void *Py_UNUSED(ignored)) if (self->args == NULL) { Py_RETURN_NONE; } - Py_INCREF(self->args); - return self->args; + return Py_NewRef(self->args); } static int @@ -283,8 +278,7 @@ BaseException_get_tb(PyBaseExceptionObject *self, void *Py_UNUSED(ignored)) if (self->traceback == NULL) { Py_RETURN_NONE; } - Py_INCREF(self->traceback); - return self->traceback; + return Py_NewRef(self->traceback); } static int @@ -300,8 +294,7 @@ BaseException_set_tb(PyBaseExceptionObject *self, PyObject *tb, void *Py_UNUSED( return -1; } - Py_INCREF(tb); - Py_XSETREF(self->traceback, tb); + Py_XSETREF(self->traceback, Py_NewRef(tb)); return 0; } @@ -380,8 +373,7 @@ PyObject * PyException_GetTraceback(PyObject *self) { PyBaseExceptionObject *base_self = _PyBaseExceptionObject_cast(self); - Py_XINCREF(base_self->traceback); - return base_self->traceback; + return Py_XNewRef(base_self->traceback); } @@ -395,8 +387,7 @@ PyObject * PyException_GetCause(PyObject *self) { PyObject *cause = _PyBaseExceptionObject_cast(self)->cause; - Py_XINCREF(cause); - return cause; + return Py_XNewRef(cause); } /* Steals a reference to cause */ @@ -412,8 +403,7 @@ PyObject * PyException_GetContext(PyObject *self) { PyObject *context = _PyBaseExceptionObject_cast(self)->context; - Py_XINCREF(context); - return context; + return Py_XNewRef(context); } /* Steals a reference to context */ @@ -579,8 +569,7 @@ StopIteration_init(PyStopIterationObject *self, PyObject *args, PyObject *kwds) value = PyTuple_GET_ITEM(args, 0); else value = Py_None; - Py_INCREF(value); - self->value = value; + self->value = Py_NewRef(value); return 0; } @@ -633,12 +622,10 @@ SystemExit_init(PySystemExitObject *self, PyObject *args, PyObject *kwds) if (size == 0) return 0; if (size == 1) { - 
Py_INCREF(PyTuple_GET_ITEM(args, 0)); - Py_XSETREF(self->code, PyTuple_GET_ITEM(args, 0)); + Py_XSETREF(self->code, Py_NewRef(PyTuple_GET_ITEM(args, 0))); } else { /* size > 1 */ - Py_INCREF(args); - Py_XSETREF(self->code, args); + Py_XSETREF(self->code, Py_NewRef(args)); } return 0; } @@ -766,7 +753,19 @@ BaseExceptionGroup_new(PyTypeObject *type, PyObject *args, PyObject *kwds) } } else { - /* Do nothing - we don't interfere with subclasses */ + /* user-defined subclass */ + if (nested_base_exceptions) { + int nonbase = PyObject_IsSubclass((PyObject*)cls, PyExc_Exception); + if (nonbase == -1) { + goto error; + } + else if (nonbase == 1) { + PyErr_Format(PyExc_TypeError, + "Cannot nest BaseExceptions in '%.200s'", + cls->tp_name); + goto error; + } + } } if (!cls) { @@ -962,11 +961,11 @@ typedef enum { EXCEPTION_GROUP_MATCH_BY_TYPE = 0, /* A PyFunction returning True for matching exceptions */ EXCEPTION_GROUP_MATCH_BY_PREDICATE = 1, - /* A set of leaf exceptions to include in the result. + /* A set of the IDs of leaf exceptions to include in the result. * This matcher type is used internally by the interpreter * to construct reraised exceptions. */ - EXCEPTION_GROUP_MATCH_INSTANCES = 2 + EXCEPTION_GROUP_MATCH_INSTANCE_IDS = 2 } _exceptiongroup_split_matcher_type; static int @@ -1024,10 +1023,16 @@ exceptiongroup_split_check_match(PyObject *exc, Py_DECREF(exc_matches); return is_true; } - case EXCEPTION_GROUP_MATCH_INSTANCES: { + case EXCEPTION_GROUP_MATCH_INSTANCE_IDS: { assert(PySet_Check(matcher_value)); if (!_PyBaseExceptionGroup_Check(exc)) { - return PySet_Contains(matcher_value, exc); + PyObject *exc_id = PyLong_FromVoidPtr(exc); + if (exc_id == NULL) { + return -1; + } + int res = PySet_Contains(matcher_value, exc_id); + Py_DECREF(exc_id); + return res; } return 0; } @@ -1212,32 +1217,35 @@ BaseExceptionGroup_subgroup(PyObject *self, PyObject *args) } static int -collect_exception_group_leaves(PyObject *exc, PyObject *leaves) +collect_exception_group_leaf_ids(PyObject *exc, PyObject *leaf_ids) { if (Py_IsNone(exc)) { return 0; } assert(PyExceptionInstance_Check(exc)); - assert(PySet_Check(leaves)); + assert(PySet_Check(leaf_ids)); - /* Add all leaf exceptions in exc to the leaves set */ + /* Add IDs of all leaf exceptions in exc to the leaf_ids set */ if (!_PyBaseExceptionGroup_Check(exc)) { - if (PySet_Add(leaves, exc) < 0) { + PyObject *exc_id = PyLong_FromVoidPtr(exc); + if (exc_id == NULL) { return -1; } - return 0; + int res = PySet_Add(leaf_ids, exc_id); + Py_DECREF(exc_id); + return res; } PyBaseExceptionGroupObject *eg = _PyBaseExceptionGroupObject_cast(exc); Py_ssize_t num_excs = PyTuple_GET_SIZE(eg->excs); /* recursive calls */ for (Py_ssize_t i = 0; i < num_excs; i++) { PyObject *e = PyTuple_GET_ITEM(eg->excs, i); - if (_Py_EnterRecursiveCall(" in collect_exception_group_leaves")) { + if (_Py_EnterRecursiveCall(" in collect_exception_group_leaf_ids")) { return -1; } - int res = collect_exception_group_leaves(e, leaves); + int res = collect_exception_group_leaf_ids(e, leaf_ids); _Py_LeaveRecursiveCall(); if (res < 0) { return -1; @@ -1258,8 +1266,8 @@ exception_group_projection(PyObject *eg, PyObject *keep) assert(_PyBaseExceptionGroup_Check(eg)); assert(PyList_CheckExact(keep)); - PyObject *leaves = PySet_New(NULL); - if (!leaves) { + PyObject *leaf_ids = PySet_New(NULL); + if (!leaf_ids) { return NULL; } @@ -1268,8 +1276,8 @@ exception_group_projection(PyObject *eg, PyObject *keep) PyObject *e = PyList_GET_ITEM(keep, i); assert(e != NULL); 
assert(_PyBaseExceptionGroup_Check(e)); - if (collect_exception_group_leaves(e, leaves) < 0) { - Py_DECREF(leaves); + if (collect_exception_group_leaf_ids(e, leaf_ids) < 0) { + Py_DECREF(leaf_ids); return NULL; } } @@ -1277,9 +1285,9 @@ exception_group_projection(PyObject *eg, PyObject *keep) _exceptiongroup_split_result split_result; bool construct_rest = false; int err = exceptiongroup_split_recursive( - eg, EXCEPTION_GROUP_MATCH_INSTANCES, leaves, + eg, EXCEPTION_GROUP_MATCH_INSTANCE_IDS, leaf_ids, construct_rest, &split_result); - Py_DECREF(leaves); + Py_DECREF(leaf_ids); if (err < 0) { return NULL; } @@ -1484,18 +1492,12 @@ ImportError_init(PyImportErrorObject *self, PyObject *args, PyObject *kwds) } Py_DECREF(empty_tuple); - Py_XINCREF(name); - Py_XSETREF(self->name, name); - - Py_XINCREF(path); - Py_XSETREF(self->path, path); - - Py_XINCREF(name_from); - Py_XSETREF(self->name_from, name_from); + Py_XSETREF(self->name, Py_XNewRef(name)); + Py_XSETREF(self->path, Py_XNewRef(path)); + Py_XSETREF(self->name_from, Py_XNewRef(name_from)); if (PyTuple_GET_SIZE(args) == 1) { - msg = PyTuple_GET_ITEM(args, 0); - Py_INCREF(msg); + msg = Py_NewRef(PyTuple_GET_ITEM(args, 0)); } Py_XSETREF(self->msg, msg); @@ -1534,8 +1536,7 @@ static PyObject * ImportError_str(PyImportErrorObject *self) { if (self->msg && PyUnicode_CheckExact(self->msg)) { - Py_INCREF(self->msg); - return self->msg; + return Py_NewRef(self->msg); } else { return BaseException_str((PyBaseExceptionObject *)self); @@ -1565,8 +1566,7 @@ ImportError_getstate(PyImportErrorObject *self) return dict; } else if (dict) { - Py_INCREF(dict); - return dict; + return Py_NewRef(dict); } else { Py_RETURN_NONE; @@ -1693,8 +1693,7 @@ oserror_parse_args(PyObject **p_args, PyTuple_SET_ITEM(newargs, 0, *myerrno); for (i = 1; i < nargs; i++) { PyObject *val = PyTuple_GET_ITEM(args, i); - Py_INCREF(val); - PyTuple_SET_ITEM(newargs, i, val); + PyTuple_SET_ITEM(newargs, i, Py_NewRef(val)); } Py_DECREF(args); args = *p_args = newargs; @@ -1729,12 +1728,10 @@ oserror_init(PyOSErrorObject *self, PyObject **p_args, return -1; } else { - Py_INCREF(filename); - self->filename = filename; + self->filename = Py_NewRef(filename); if (filename2 && filename2 != Py_None) { - Py_INCREF(filename2); - self->filename2 = filename2; + self->filename2 = Py_NewRef(filename2); } if (nargs >= 2 && nargs <= 5) { @@ -1749,15 +1746,10 @@ oserror_init(PyOSErrorObject *self, PyObject **p_args, } } } - Py_XINCREF(myerrno); - self->myerrno = myerrno; - - Py_XINCREF(strerror); - self->strerror = strerror; - + self->myerrno = Py_XNewRef(myerrno); + self->strerror = Py_XNewRef(strerror); #ifdef MS_WINDOWS - Py_XINCREF(winerror); - self->winerror = winerror; + self->winerror = Py_XNewRef(winerror); #endif /* Steals the reference to args */ @@ -1983,7 +1975,7 @@ static PyObject * OSError_reduce(PyOSErrorObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *args = self->args; - PyObject *res = NULL, *tmp; + PyObject *res = NULL; /* self->args is only the first two real arguments if there was a * file name given to OSError. 
*/ @@ -1993,16 +1985,9 @@ OSError_reduce(PyOSErrorObject *self, PyObject *Py_UNUSED(ignored)) if (!args) return NULL; - tmp = PyTuple_GET_ITEM(self->args, 0); - Py_INCREF(tmp); - PyTuple_SET_ITEM(args, 0, tmp); - - tmp = PyTuple_GET_ITEM(self->args, 1); - Py_INCREF(tmp); - PyTuple_SET_ITEM(args, 1, tmp); - - Py_INCREF(self->filename); - PyTuple_SET_ITEM(args, 2, self->filename); + PyTuple_SET_ITEM(args, 0, Py_NewRef(PyTuple_GET_ITEM(self->args, 0))); + PyTuple_SET_ITEM(args, 1, Py_NewRef(PyTuple_GET_ITEM(self->args, 1))); + PyTuple_SET_ITEM(args, 2, Py_NewRef(self->filename)); if (self->filename2) { /* @@ -2010,12 +1995,10 @@ OSError_reduce(PyOSErrorObject *self, PyObject *Py_UNUSED(ignored)) * So, to recreate filename2, we need to pass in * winerror as well. */ - Py_INCREF(Py_None); - PyTuple_SET_ITEM(args, 3, Py_None); + PyTuple_SET_ITEM(args, 3, Py_NewRef(Py_None)); /* filename2 */ - Py_INCREF(self->filename2); - PyTuple_SET_ITEM(args, 4, self->filename2); + PyTuple_SET_ITEM(args, 4, Py_NewRef(self->filename2)); } } else Py_INCREF(args); @@ -2176,8 +2159,7 @@ NameError_init(PyNameErrorObject *self, PyObject *args, PyObject *kwds) } Py_DECREF(empty_tuple); - Py_XINCREF(name); - Py_XSETREF(self->name, name); + Py_XSETREF(self->name, Py_XNewRef(name)); return 0; } @@ -2251,11 +2233,8 @@ AttributeError_init(PyAttributeErrorObject *self, PyObject *args, PyObject *kwds } Py_DECREF(empty_tuple); - Py_XINCREF(name); - Py_XSETREF(self->name, name); - - Py_XINCREF(obj); - Py_XSETREF(self->obj, obj); + Py_XSETREF(self->name, Py_XNewRef(name)); + Py_XSETREF(self->obj, Py_XNewRef(obj)); return 0; } @@ -2313,8 +2292,7 @@ SyntaxError_init(PySyntaxErrorObject *self, PyObject *args, PyObject *kwds) return -1; if (lenargs >= 1) { - Py_INCREF(PyTuple_GET_ITEM(args, 0)); - Py_XSETREF(self->msg, PyTuple_GET_ITEM(args, 0)); + Py_XSETREF(self->msg, Py_NewRef(PyTuple_GET_ITEM(args, 0))); } if (lenargs == 2) { info = PyTuple_GET_ITEM(args, 1); @@ -2410,8 +2388,7 @@ my_basename(PyObject *name) return PyUnicode_Substring(name, offset, size); } else { - Py_INCREF(name); - return name; + return Py_NewRef(name); } } @@ -2563,8 +2540,7 @@ get_string(PyObject *attr, const char *name) PyErr_Format(PyExc_TypeError, "%.200s attribute must be bytes", name); return NULL; } - Py_INCREF(attr); - return attr; + return Py_NewRef(attr); } static PyObject * @@ -2580,8 +2556,7 @@ get_unicode(PyObject *attr, const char *name) "%.200s attribute must be unicode", name); return NULL; } - Py_INCREF(attr); - return attr; + return Py_NewRef(attr); } static int diff --git a/Objects/fileobject.c b/Objects/fileobject.c index ab67cd23cef3b3..e99e155f2b8c98 100644 --- a/Objects/fileobject.c +++ b/Objects/fileobject.c @@ -67,8 +67,7 @@ PyFile_GetLine(PyObject *f, int n) } if (result != NULL && !PyBytes_Check(result) && !PyUnicode_Check(result)) { - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); PyErr_SetString(PyExc_TypeError, "object.readline() returned non-string"); } @@ -77,8 +76,7 @@ PyFile_GetLine(PyObject *f, int n) const char *s = PyBytes_AS_STRING(result); Py_ssize_t len = PyBytes_GET_SIZE(result); if (len == 0) { - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); PyErr_SetString(PyExc_EOFError, "EOF when reading a line"); } @@ -88,24 +86,21 @@ PyFile_GetLine(PyObject *f, int n) else { PyObject *v; v = PyBytes_FromStringAndSize(s, len-1); - Py_DECREF(result); - result = v; + Py_SETREF(result, v); } } } if (n < 0 && result != NULL && PyUnicode_Check(result)) { Py_ssize_t len = PyUnicode_GET_LENGTH(result); 
if (len == 0) { - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); PyErr_SetString(PyExc_EOFError, "EOF when reading a line"); } else if (PyUnicode_READ_CHAR(result, len-1) == '\n') { PyObject *v; v = PyUnicode_Substring(result, 0, len-1); - Py_DECREF(result); - result = v; + Py_SETREF(result, v); } } return result; diff --git a/Objects/floatobject.c b/Objects/floatobject.c index c4353572d32d8e..912b742f797d24 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -371,8 +371,7 @@ convert_to_double(PyObject **v, double *dbl) } } else { - Py_INCREF(Py_NotImplemented); - *v = Py_NotImplemented; + *v = Py_NewRef(Py_NotImplemented); return -1; } return 0; @@ -532,20 +531,17 @@ float_richcompare(PyObject *v, PyObject *w, int op) temp = _PyLong_Lshift(ww, 1); if (temp == NULL) goto Error; - Py_DECREF(ww); - ww = temp; + Py_SETREF(ww, temp); temp = _PyLong_Lshift(vv, 1); if (temp == NULL) goto Error; - Py_DECREF(vv); - vv = temp; + Py_SETREF(vv, temp); temp = PyNumber_Or(vv, _PyLong_GetOne()); if (temp == NULL) goto Error; - Py_DECREF(vv); - vv = temp; + Py_SETREF(vv, temp); } r = PyObject_RichCompareBool(vv, ww, op); @@ -904,8 +900,7 @@ float_is_integer_impl(PyObject *self) PyExc_ValueError); return NULL; } - Py_INCREF(o); - return o; + return Py_NewRef(o); } /*[clinic input] @@ -1124,11 +1119,12 @@ float___round___impl(PyObject *self, PyObject *o_ndigits) static PyObject * float_float(PyObject *v) { - if (PyFloat_CheckExact(v)) - Py_INCREF(v); - else - v = PyFloat_FromDouble(((PyFloatObject *)v)->ob_fval); - return v; + if (PyFloat_CheckExact(v)) { + return Py_NewRef(v); + } + else { + return PyFloat_FromDouble(((PyFloatObject *)v)->ob_fval); + } } /*[clinic input] @@ -1724,12 +1720,14 @@ float___getnewargs___impl(PyObject *self) } /* this is for the benefit of the pack/unpack routines below */ +typedef enum _py_float_format_type float_format_type; +#define unknown_format _py_float_format_unknown +#define ieee_big_endian_format _py_float_format_ieee_big_endian +#define ieee_little_endian_format _py_float_format_ieee_little_endian -typedef enum { - unknown_format, ieee_big_endian_format, ieee_little_endian_format -} float_format_type; +#define float_format (_PyRuntime.float_state.float_format) +#define double_format (_PyRuntime.float_state.double_format) -static float_format_type double_format, float_format; /*[clinic input] @classmethod @@ -1930,13 +1928,9 @@ PyTypeObject PyFloat_Type = { .tp_vectorcall = (vectorcallfunc)float_vectorcall, }; -void -_PyFloat_InitState(PyInterpreterState *interp) +static void +_init_global_state(void) { - if (!_Py_IsMainInterpreter(interp)) { - return; - } - float_format_type detected_double_format, detected_float_format; /* We attempt to determine if this machine is using IEEE @@ -1986,6 +1980,15 @@ _PyFloat_InitState(PyInterpreterState *interp) float_format = detected_float_format; } +void +_PyFloat_InitState(PyInterpreterState *interp) +{ + if (!_Py_IsMainInterpreter(interp)) { + return; + } + _init_global_state(); +} + PyStatus _PyFloat_InitTypes(PyInterpreterState *interp) { diff --git a/Objects/frame_layout.md b/Objects/frame_layout.md index 11688f68e42e3d..2f95214db56498 100644 --- a/Objects/frame_layout.md +++ b/Objects/frame_layout.md @@ -9,10 +9,10 @@ results in poor locality of reference. In 3.11, rather than have these frames scattered about memory, as happens for heap-allocated objects, frames are allocated -contiguously in a per-thread stack. +contiguously in a per-thread stack. 
This improves performance significantly for two reasons: * It reduces allocation overhead to a pointer comparison and increment. -* Stack allocated data has the best possible locality and will always be in +* Stack allocated data has the best possible locality and will always be in CPU cache. Generator and coroutines still need heap allocated activation records, but @@ -63,7 +63,7 @@ We may implement this in the future. > In a contiguous stack, we would need to save one fewer registers, as the > top of the caller's activation record would be the same at the base of the -> callee's. However, since some activation records are kept on the heap we +> callee's. However, since some activation records are kept on the heap we > cannot do this. ### Generators and Coroutines @@ -85,7 +85,7 @@ and builtins, than strong references to both globals and builtins. ### Frame objects When creating a backtrace or when calling `sys._getframe()` the frame becomes -visible to Python code. When this happens a new `PyFrameObject` is created +visible to Python code. When this happens a new `PyFrameObject` is created and a strong reference to it placed in the `frame_obj` field of the specials section. The `frame_obj` field is initially `NULL`. @@ -104,7 +104,7 @@ Generator objects have a `_PyInterpreterFrame` embedded in them. This means that creating a generator requires only a single allocation, reducing allocation overhead and improving locality of reference. The embedded frame is linked into the per-thread frame when iterated or -awaited. +awaited. If a frame object associated with a generator outlives the generator, then the embedded `_PyInterpreterFrame` is copied into the frame object. @@ -119,4 +119,14 @@ Thus, some of the field names may be a bit misleading. For example the `f_globals` field has a `f_` prefix implying it belongs to the `PyFrameObject` struct, although it belongs to the `_PyInterpreterFrame` struct. -We may rationalize this naming scheme for 3.12. \ No newline at end of file +We may rationalize this naming scheme for 3.12. + + +### Shim frames + +On entry to `_PyEval_EvalFrameDefault()` a shim `_PyInterpreterFrame` is pushed. +This frame is stored on the C stack, and popped when `_PyEval_EvalFrameDefault()` +returns. This extra frame is inserted so that `RETURN_VALUE`, `YIELD_VALUE`, and +`RETURN_GENERATOR` do not need to check whether the current frame is the entry frame. +The shim frame points to a special code object containing the `INTERPRETER_EXIT` +instruction which cleans up the shim frame and returns. 
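Editorial aside (not part of the patch): the frame-layout notes above describe how a `PyFrameObject` wrapper is only materialized once a frame becomes visible to Python code (for example via a traceback or `sys._getframe()`), while the underlying `_PyInterpreterFrame` records live on a per-thread stack. A minimal sketch of what that looks like from the public C API, using only documented functions (`PyEval_GetFrame`, `PyFrame_GetCode`, `PyFrame_GetBack`); the helper name `dump_call_stack` is illustrative and not part of CPython:

```c
#include <Python.h>

/* Walk the currently visible Python call stack. Asking for the current
 * frame is what forces the lazily created PyFrameObject wrappers into
 * existence for the otherwise stack-allocated interpreter frames. */
static PyObject *
dump_call_stack(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args))
{
    PyFrameObject *frame = PyEval_GetFrame();  /* borrowed reference, may be NULL */
    Py_XINCREF(frame);
    while (frame != NULL) {
        PyCodeObject *code = PyFrame_GetCode(frame);  /* new reference */
        PyObject *name = PyObject_GetAttrString((PyObject *)code, "co_name");
        Py_DECREF(code);
        if (name == NULL) {
            Py_DECREF(frame);
            return NULL;
        }
        const char *utf8 = PyUnicode_AsUTF8(name);
        if (utf8 == NULL) {
            PyErr_Clear();
            utf8 = "<unknown>";
        }
        printf("frame: %s\n", utf8);
        Py_DECREF(name);

        PyFrameObject *back = PyFrame_GetBack(frame);  /* new reference or NULL */
        Py_DECREF(frame);
        frame = back;
    }
    Py_RETURN_NONE;
}
```

The `Objects/frameobject.c` changes below also add `PyFrame_GetVar()` and `PyFrame_GetVarString()`, which return a new reference to a single frame variable instead of requiring a full fast-locals-to-dict copy. A hedged usage sketch, assuming an extension or debugger running inside a call where a local named `x` happens to exist (both the helper name `peek_local` and the variable name are hypothetical):

```c
static PyObject *
peek_local(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args))
{
    PyFrameObject *frame = PyEval_GetFrame();  /* borrowed reference */
    if (frame == NULL) {
        PyErr_SetString(PyExc_RuntimeError, "no current frame");
        return NULL;
    }
    /* New reference on success; NULL with NameError set if "x" is not
     * bound in that frame. */
    return PyFrame_GetVarString(frame, "x");
}
```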
diff --git a/Objects/frameobject.c b/Objects/frameobject.c index dd69207571b538..84403325ced7f0 100644 --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -28,8 +28,7 @@ frame_getlocals(PyFrameObject *f, void *closure) if (PyFrame_FastToLocalsWithError(f) < 0) return NULL; PyObject *locals = f->f_frame->f_locals; - Py_INCREF(locals); - return locals; + return Py_NewRef(locals); } int @@ -73,8 +72,7 @@ frame_getglobals(PyFrameObject *f, void *closure) if (globals == NULL) { globals = Py_None; } - Py_INCREF(globals); - return globals; + return Py_NewRef(globals); } static PyObject * @@ -84,8 +82,7 @@ frame_getbuiltins(PyFrameObject *f, void *closure) if (builtins == NULL) { builtins = Py_None; } - Py_INCREF(builtins); - return builtins; + return Py_NewRef(builtins); } static PyObject * @@ -603,7 +600,7 @@ _PyFrame_GetState(PyFrameObject *frame) if (_PyInterpreterFrame_LASTI(frame->f_frame) < 0) { return FRAME_CREATED; } - switch (_PyOpcode_Deopt[_Py_OPCODE(*frame->f_frame->prev_instr)]) + switch (_Py_OPCODE(*frame->f_frame->prev_instr)) { case COPY_FREE_VARS: case MAKE_CELL: @@ -620,42 +617,6 @@ _PyFrame_GetState(PyFrameObject *frame) Py_UNREACHABLE(); } -static void -add_load_fast_null_checks(PyCodeObject *co) -{ - int changed = 0; - _Py_CODEUNIT *instructions = _PyCode_CODE(co); - for (Py_ssize_t i = 0; i < Py_SIZE(co); i++) { - int opcode = _Py_OPCODE(instructions[i]); - switch (opcode) { - case LOAD_FAST: - case LOAD_FAST__LOAD_FAST: - case LOAD_FAST__LOAD_CONST: - changed = 1; - _Py_SET_OPCODE(instructions[i], LOAD_FAST_CHECK); - break; - case LOAD_CONST__LOAD_FAST: - changed = 1; - _Py_SET_OPCODE(instructions[i], LOAD_CONST); - break; - case STORE_FAST__LOAD_FAST: - changed = 1; - _Py_SET_OPCODE(instructions[i], STORE_FAST); - break; - } - i += _PyOpcode_Caches[_PyOpcode_Deopt[opcode]]; - } - if (changed && co->_co_cached != NULL) { - // invalidate cached co_code object - Py_CLEAR(co->_co_cached->_co_code); - Py_CLEAR(co->_co_cached->_co_cellvars); - Py_CLEAR(co->_co_cached->_co_freevars); - Py_CLEAR(co->_co_cached->_co_varnames); - PyMem_Free(co->_co_cached); - co->_co_cached = NULL; - } -} - /* Setter for f_lineno - you can set f_lineno from within a trace function in * order to jump to a given line of code, subject to some restrictions. Most * lines are OK to jump to because they don't make any assumptions about the @@ -745,8 +706,6 @@ frame_setlineno(PyFrameObject *f, PyObject* p_new_lineno, void *Py_UNUSED(ignore return -1; } - add_load_fast_null_checks(f->f_frame->f_code); - /* PyCode_NewWithPosOnlyArgs limits co_code to be under INT_MAX so this * should never overflow. */ int len = (int)Py_SIZE(f->f_frame->f_code); @@ -805,6 +764,31 @@ frame_setlineno(PyFrameObject *f, PyObject* p_new_lineno, void *Py_UNUSED(ignore PyErr_SetString(PyExc_ValueError, msg); return -1; } + // Populate any NULL locals that the compiler might have "proven" to exist + // in the new location. Rather than crashing or changing co_code, just bind + // None instead: + int unbound = 0; + for (int i = 0; i < f->f_frame->f_code->co_nlocalsplus; i++) { + // Counting every unbound local is overly-cautious, but a full flow + // analysis (like we do in the compiler) is probably too expensive: + unbound += f->f_frame->localsplus[i] == NULL; + } + if (unbound) { + const char *e = "assigning None to %d unbound local%s"; + const char *s = (unbound == 1) ? 
"" : "s"; + if (PyErr_WarnFormat(PyExc_RuntimeWarning, 0, e, unbound, s)) { + return -1; + } + // Do this in a second pass to avoid writing a bunch of Nones when + // warnings are being treated as errors and the previous bit raises: + for (int i = 0; i < f->f_frame->f_code->co_nlocalsplus; i++) { + if (f->f_frame->localsplus[i] == NULL) { + f->f_frame->localsplus[i] = Py_NewRef(Py_None); + unbound--; + } + } + assert(unbound == 0); + } if (state == FRAME_SUSPENDED) { /* Account for value popped by yield */ start_stack = pop_value(start_stack); @@ -836,13 +820,9 @@ static PyObject * frame_gettrace(PyFrameObject *f, void *closure) { PyObject* trace = f->f_trace; - if (trace == NULL) trace = Py_None; - - Py_INCREF(trace); - - return trace; + return Py_NewRef(trace); } static int @@ -851,9 +831,7 @@ frame_settrace(PyFrameObject *f, PyObject* v, void *closure) if (v == Py_None) { v = NULL; } - Py_XINCREF(v); - Py_XSETREF(f->f_trace, v); - + Py_XSETREF(f->f_trace, Py_XNewRef(v)); return 0; } @@ -871,15 +849,6 @@ static PyGetSetDef frame_getsetlist[] = { {0} }; -/* Stack frames are allocated and deallocated at a considerable rate. - In an attempt to improve the speed of function calls, we maintain - a separate free list of stack frames (just like floats are - allocated in a special way -- see floatobject.c). When a stack - frame is on the free list, only the following members have a meaning: - ob_type == &Frametype - f_back next item on free list, or NULL -*/ - static void frame_dealloc(PyFrameObject *f) { @@ -977,7 +946,8 @@ frame_sizeof(PyFrameObject *f, PyObject *Py_UNUSED(ignored)) Py_ssize_t res; res = offsetof(PyFrameObject, _f_frame_data) + offsetof(_PyInterpreterFrame, localsplus); PyCodeObject *code = f->f_frame->f_code; - res += (code->co_nlocalsplus+code->co_stacksize) * sizeof(PyObject *); + int nconsts = (int)PyTuple_Size(code->co_consts); + res += (code->co_nlocalsplus + code->co_stacksize + nconsts) * sizeof(PyObject *); return PyLong_FromSsize_t(res); } @@ -1041,11 +1011,9 @@ PyTypeObject PyFrame_Type = { static void init_frame(_PyInterpreterFrame *frame, PyFunctionObject *func, PyObject *locals) { - /* _PyFrame_InitializeSpecials consumes reference to func */ - Py_INCREF(func); - Py_XINCREF(locals); PyCodeObject *code = (PyCodeObject *)func->func_code; - _PyFrame_InitializeSpecials(frame, func, locals, code); + _PyFrame_InitializeSpecials(frame, (PyFunctionObject*)Py_NewRef(func), + Py_XNewRef(locals), code); for (Py_ssize_t i = 0; i < code->co_nlocalsplus; i++) { frame->localsplus[i] = NULL; } @@ -1133,82 +1101,106 @@ _PyFrame_OpAlreadyRan(_PyInterpreterFrame *frame, int opcode, int oparg) return 0; } + +// Initialize frame free variables if needed +static void +frame_init_get_vars(_PyInterpreterFrame *frame) +{ + // COPY_FREE_VARS has no quickened forms, so no need to use _PyOpcode_Deopt + // here: + PyCodeObject *co = frame->f_code; + int lasti = _PyInterpreterFrame_LASTI(frame); + if (!(lasti < 0 && _Py_OPCODE(_PyCode_CODE(co)[0]) == COPY_FREE_VARS + && PyFunction_Check(frame->f_funcobj))) + { + /* Free vars are initialized */ + return; + } + + /* Free vars have not been initialized -- Do that */ + PyObject *closure = ((PyFunctionObject *)frame->f_funcobj)->func_closure; + int offset = co->co_nlocals + co->co_nplaincellvars; + for (int i = 0; i < co->co_nfreevars; ++i) { + PyObject *o = PyTuple_GET_ITEM(closure, i); + frame->localsplus[offset + i] = Py_NewRef(o); + } + // COPY_FREE_VARS doesn't have inline CACHEs, either: + frame->prev_instr = _PyCode_CODE(frame->f_code); +} 
+ + +static int +frame_get_var(_PyInterpreterFrame *frame, PyCodeObject *co, int i, + PyObject **pvalue) +{ + _PyLocals_Kind kind = _PyLocals_GetKind(co->co_localspluskinds, i); + + /* If the namespace is unoptimized, then one of the + following cases applies: + 1. It does not contain free variables, because it + uses import * or is a top-level namespace. + 2. It is a class namespace. + We don't want to accidentally copy free variables + into the locals dict used by the class. + */ + if (kind & CO_FAST_FREE && !(co->co_flags & CO_OPTIMIZED)) { + return 0; + } + + PyObject *value = frame->localsplus[i]; + if (frame->stacktop) { + if (kind & CO_FAST_FREE) { + // The cell was set by COPY_FREE_VARS. + assert(value != NULL && PyCell_Check(value)); + value = PyCell_GET(value); + } + else if (kind & CO_FAST_CELL) { + // Note that no *_DEREF ops can happen before MAKE_CELL + // executes. So there's no need to duplicate the work + // that MAKE_CELL would otherwise do later, if it hasn't + // run yet. + if (value != NULL) { + if (PyCell_Check(value) && + _PyFrame_OpAlreadyRan(frame, MAKE_CELL, i)) { + // (likely) MAKE_CELL must have executed already. + value = PyCell_GET(value); + } + // (likely) Otherwise it it is an arg (kind & CO_FAST_LOCAL), + // with the initial value set when the frame was created... + // (unlikely) ...or it was set to some initial value by + // an earlier call to PyFrame_LocalsToFast(). + } + } + } + else { + assert(value == NULL); + } + *pvalue = value; + return 1; +} + int -_PyFrame_FastToLocalsWithError(_PyInterpreterFrame *frame) { +_PyFrame_FastToLocalsWithError(_PyInterpreterFrame *frame) +{ /* Merge fast locals into f->f_locals */ - PyObject *locals; - PyObject **fast; - PyCodeObject *co; - locals = frame->f_locals; + PyObject *locals = frame->f_locals; if (locals == NULL) { locals = frame->f_locals = PyDict_New(); - if (locals == NULL) + if (locals == NULL) { return -1; - } - co = frame->f_code; - fast = _PyFrame_GetLocalsArray(frame); - // COPY_FREE_VARS has no quickened forms, so no need to use _PyOpcode_Deopt - // here: - int lasti = _PyInterpreterFrame_LASTI(frame); - if (lasti < 0 && _Py_OPCODE(_PyCode_CODE(co)[0]) == COPY_FREE_VARS - && PyFunction_Check(frame->f_funcobj)) - { - /* Free vars have not been initialized -- Do that */ - PyCodeObject *co = frame->f_code; - PyObject *closure = ((PyFunctionObject *)frame->f_funcobj)->func_closure; - int offset = co->co_nlocals + co->co_nplaincellvars; - for (int i = 0; i < co->co_nfreevars; ++i) { - PyObject *o = PyTuple_GET_ITEM(closure, i); - Py_INCREF(o); - frame->localsplus[offset + i] = o; } - // COPY_FREE_VARS doesn't have inline CACHEs, either: - frame->prev_instr = _PyCode_CODE(frame->f_code); } - for (int i = 0; i < co->co_nlocalsplus; i++) { - _PyLocals_Kind kind = _PyLocals_GetKind(co->co_localspluskinds, i); - /* If the namespace is unoptimized, then one of the - following cases applies: - 1. It does not contain free variables, because it - uses import * or is a top-level namespace. - 2. It is a class namespace. - We don't want to accidentally copy free variables - into the locals dict used by the class. 
- */ - if (kind & CO_FAST_FREE && !(co->co_flags & CO_OPTIMIZED)) { + frame_init_get_vars(frame); + + PyCodeObject *co = frame->f_code; + for (int i = 0; i < co->co_nlocalsplus; i++) { + PyObject *value; // borrowed reference + if (!frame_get_var(frame, co, i, &value)) { continue; } PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i); - PyObject *value = fast[i]; - if (frame->stacktop) { - if (kind & CO_FAST_FREE) { - // The cell was set by COPY_FREE_VARS. - assert(value != NULL && PyCell_Check(value)); - value = PyCell_GET(value); - } - else if (kind & CO_FAST_CELL) { - // Note that no *_DEREF ops can happen before MAKE_CELL - // executes. So there's no need to duplicate the work - // that MAKE_CELL would otherwise do later, if it hasn't - // run yet. - if (value != NULL) { - if (PyCell_Check(value) && - _PyFrame_OpAlreadyRan(frame, MAKE_CELL, i)) { - // (likely) MAKE_CELL must have executed already. - value = PyCell_GET(value); - } - // (likely) Otherwise it it is an arg (kind & CO_FAST_LOCAL), - // with the initial value set when the frame was created... - // (unlikely) ...or it was set to some initial value by - // an earlier call to PyFrame_LocalsToFast(). - } - } - } - else { - assert(value == NULL); - } if (value == NULL) { if (PyObject_DelItem(locals, name) != 0) { if (PyErr_ExceptionMatches(PyExc_KeyError)) { @@ -1228,6 +1220,54 @@ _PyFrame_FastToLocalsWithError(_PyInterpreterFrame *frame) { return 0; } + +PyObject * +PyFrame_GetVar(PyFrameObject *frame_obj, PyObject *name) +{ + if (!PyUnicode_Check(name)) { + PyErr_Format(PyExc_TypeError, "name must be str, not %s", + Py_TYPE(name)->tp_name); + return NULL; + } + + _PyInterpreterFrame *frame = frame_obj->f_frame; + frame_init_get_vars(frame); + + PyCodeObject *co = frame->f_code; + for (int i = 0; i < co->co_nlocalsplus; i++) { + PyObject *var_name = PyTuple_GET_ITEM(co->co_localsplusnames, i); + if (!_PyUnicode_Equal(var_name, name)) { + continue; + } + + PyObject *value; // borrowed reference + if (!frame_get_var(frame, co, i, &value)) { + break; + } + if (value == NULL) { + break; + } + return Py_NewRef(value); + } + + PyErr_Format(PyExc_NameError, "variable %R does not exist", name); + return NULL; +} + + +PyObject * +PyFrame_GetVarString(PyFrameObject *frame, const char *name) +{ + PyObject *name_obj = PyUnicode_FromString(name); + if (name_obj == NULL) { + return NULL; + } + PyObject *value = PyFrame_GetVar(frame, name_obj); + Py_DECREF(name_obj); + return value; +} + + int PyFrame_FastToLocalsWithError(PyFrameObject *f) { @@ -1269,7 +1309,6 @@ _PyFrame_LocalsToFast(_PyInterpreterFrame *frame, int clear) } fast = _PyFrame_GetLocalsArray(frame); co = frame->f_code; - bool added_null_checks = false; PyErr_Fetch(&error_type, &error_value, &error_traceback); for (int i = 0; i < co->co_nlocalsplus; i++) { @@ -1289,10 +1328,6 @@ _PyFrame_LocalsToFast(_PyInterpreterFrame *frame, int clear) } } PyObject *oldvalue = fast[i]; - if (!added_null_checks && oldvalue != NULL && value == NULL) { - add_load_fast_null_checks(co); - added_null_checks = true; - } PyObject *cell = NULL; if (kind == CO_FAST_FREE) { // The cell was set when the frame was created from @@ -1313,14 +1348,22 @@ _PyFrame_LocalsToFast(_PyInterpreterFrame *frame, int clear) if (cell != NULL) { oldvalue = PyCell_GET(cell); if (value != oldvalue) { - Py_XINCREF(value); - PyCell_SET(cell, value); + PyCell_SET(cell, Py_XNewRef(value)); Py_XDECREF(oldvalue); } } else if (value != oldvalue) { - Py_XINCREF(value); - Py_XSETREF(fast[i], value); + if (value == NULL) { + 
// Probably can't delete this, since the compiler's flow + // analysis may have already "proven" that it exists here: + const char *e = "assigning None to unbound local %R"; + if (PyErr_WarnFormat(PyExc_RuntimeWarning, 0, e, name)) { + // It's okay if frame_obj is NULL, just try anyways: + PyErr_WriteUnraisable((PyObject *)frame->frame_obj); + } + value = Py_NewRef(Py_None); + } + Py_XSETREF(fast[i], Py_NewRef(value)); } Py_XDECREF(value); } @@ -1337,15 +1380,15 @@ PyFrame_LocalsToFast(PyFrameObject *f, int clear) } } - -int _PyFrame_IsEntryFrame(PyFrameObject *frame) +int +_PyFrame_IsEntryFrame(PyFrameObject *frame) { assert(frame != NULL); - assert(!_PyFrame_IsIncomplete(frame->f_frame)); - return frame->f_frame->is_entry; + _PyInterpreterFrame *f = frame->f_frame; + assert(!_PyFrame_IsIncomplete(f)); + return f->previous && f->previous->owner == FRAME_OWNED_BY_CSTACK; } - PyCodeObject * PyFrame_GetCode(PyFrameObject *frame) { @@ -1353,8 +1396,7 @@ PyFrame_GetCode(PyFrameObject *frame) assert(!_PyFrame_IsIncomplete(frame->f_frame)); PyCodeObject *code = frame->f_frame->f_code; assert(code != NULL); - Py_INCREF(code); - return code; + return (PyCodeObject*)Py_NewRef(code); } @@ -1373,8 +1415,7 @@ PyFrame_GetBack(PyFrameObject *frame) back = _PyFrame_GetFrameObject(prev); } } - Py_XINCREF(back); - return back; + return (PyFrameObject*)Py_XNewRef(back); } PyObject* @@ -1437,5 +1478,3 @@ _PyEval_BuiltinsFromGlobals(PyThreadState *tstate, PyObject *globals) return _PyEval_GetBuiltins(tstate); } - - diff --git a/Objects/funcobject.c b/Objects/funcobject.c index ccc6d0b52eab68..d5cf5b9277b3f1 100644 --- a/Objects/funcobject.c +++ b/Objects/funcobject.c @@ -3,12 +3,77 @@ #include "Python.h" #include "pycore_ceval.h" // _PyEval_BuiltinsFromGlobals() +#include "pycore_code.h" // _Py_next_func_version #include "pycore_object.h" // _PyObject_GC_UNTRACK() #include "pycore_pyerrors.h" // _PyErr_Occurred() #include "structmember.h" // PyMemberDef -static uint32_t next_func_version = 1; +static void +notify_func_watchers(PyInterpreterState *interp, PyFunction_WatchEvent event, + PyFunctionObject *func, PyObject *new_value) +{ + uint8_t bits = interp->active_func_watchers; + int i = 0; + while (bits) { + assert(i < FUNC_MAX_WATCHERS); + if (bits & 1) { + PyFunction_WatchCallback cb = interp->func_watchers[i]; + // callback must be non-null if the watcher bit is set + assert(cb != NULL); + if (cb(event, func, new_value) < 0) { + PyErr_WriteUnraisable((PyObject *) func); + } + } + i++; + bits >>= 1; + } +} + +static inline void +handle_func_event(PyFunction_WatchEvent event, PyFunctionObject *func, + PyObject *new_value) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + assert(interp->_initialized); + if (interp->active_func_watchers) { + notify_func_watchers(interp, event, func, new_value); + } +} +int +PyFunction_AddWatcher(PyFunction_WatchCallback callback) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + assert(interp->_initialized); + for (int i = 0; i < FUNC_MAX_WATCHERS; i++) { + if (interp->func_watchers[i] == NULL) { + interp->func_watchers[i] = callback; + interp->active_func_watchers |= (1 << i); + return i; + } + } + PyErr_SetString(PyExc_RuntimeError, "no more func watcher IDs available"); + return -1; +} + +int +PyFunction_ClearWatcher(int watcher_id) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (watcher_id < 0 || watcher_id >= FUNC_MAX_WATCHERS) { + PyErr_Format(PyExc_ValueError, "invalid func watcher ID %d", + watcher_id); + return -1; + } + 
if (!interp->func_watchers[watcher_id]) { + PyErr_Format(PyExc_ValueError, "no func watcher set for ID %d", + watcher_id); + return -1; + } + interp->func_watchers[watcher_id] = NULL; + interp->active_func_watchers &= ~(1 << watcher_id); + return 0; +} PyFunctionObject * _PyFunction_FromConstructor(PyFrameConstructor *constr) { @@ -17,22 +82,15 @@ _PyFunction_FromConstructor(PyFrameConstructor *constr) if (op == NULL) { return NULL; } - Py_INCREF(constr->fc_globals); - op->func_globals = constr->fc_globals; - Py_INCREF(constr->fc_builtins); - op->func_builtins = constr->fc_builtins; - Py_INCREF(constr->fc_name); - op->func_name = constr->fc_name; - Py_INCREF(constr->fc_qualname); - op->func_qualname = constr->fc_qualname; - Py_INCREF(constr->fc_code); - op->func_code = constr->fc_code; + op->func_globals = Py_NewRef(constr->fc_globals); + op->func_builtins = Py_NewRef(constr->fc_builtins); + op->func_name = Py_NewRef(constr->fc_name); + op->func_qualname = Py_NewRef(constr->fc_qualname); + op->func_code = Py_NewRef(constr->fc_code); op->func_defaults = NULL; op->func_kwdefaults = NULL; - Py_XINCREF(constr->fc_closure); - op->func_closure = constr->fc_closure; - Py_INCREF(Py_None); - op->func_doc = Py_None; + op->func_closure = Py_XNewRef(constr->fc_closure); + op->func_doc = Py_NewRef(Py_None); op->func_dict = NULL; op->func_weakreflist = NULL; op->func_module = NULL; @@ -40,6 +98,7 @@ _PyFunction_FromConstructor(PyFrameConstructor *constr) op->vectorcall = _PyFunction_Vectorcall; op->func_version = 0; _PyObject_GC_TRACK(op); + handle_func_event(PyFunction_EVENT_CREATE, op, NULL); return op; } @@ -52,12 +111,10 @@ PyFunction_NewWithQualName(PyObject *code, PyObject *globals, PyObject *qualname PyThreadState *tstate = _PyThreadState_GET(); - PyCodeObject *code_obj = (PyCodeObject *)code; - Py_INCREF(code_obj); + PyCodeObject *code_obj = (PyCodeObject *)Py_NewRef(code); - PyObject *name = code_obj->co_name; - assert(name != NULL); - Py_INCREF(name); + assert(code_obj->co_name != NULL); + PyObject *name = Py_NewRef(code_obj->co_name); if (!qualname) { qualname = code_obj->co_qualname; @@ -116,6 +173,7 @@ PyFunction_NewWithQualName(PyObject *code, PyObject *globals, PyObject *qualname op->vectorcall = _PyFunction_Vectorcall; op->func_version = 0; _PyObject_GC_TRACK(op); + handle_func_event(PyFunction_EVENT_CREATE, op, NULL); return (PyObject *)op; error: @@ -137,10 +195,10 @@ uint32_t _PyFunction_GetVersionForCurrentState(PyFunctionObject *func) if (func->vectorcall != _PyFunction_Vectorcall) { return 0; } - if (next_func_version == 0) { + if (_PyRuntime.func_state.next_version == 0) { return 0; } - uint32_t v = next_func_version++; + uint32_t v = _PyRuntime.func_state.next_version++; func->func_version = v; return v; } @@ -207,6 +265,8 @@ PyFunction_SetDefaults(PyObject *op, PyObject *defaults) PyErr_SetString(PyExc_SystemError, "non-tuple default args"); return -1; } + handle_func_event(PyFunction_EVENT_MODIFY_DEFAULTS, + (PyFunctionObject *) op, defaults); ((PyFunctionObject *)op)->func_version = 0; Py_XSETREF(((PyFunctionObject *)op)->func_defaults, defaults); return 0; @@ -247,6 +307,8 @@ PyFunction_SetKwDefaults(PyObject *op, PyObject *defaults) "non-dict keyword only default args"); return -1; } + handle_func_event(PyFunction_EVENT_MODIFY_KWDEFAULTS, + (PyFunctionObject *) op, defaults); ((PyFunctionObject *)op)->func_version = 0; Py_XSETREF(((PyFunctionObject *)op)->func_kwdefaults, defaults); return 0; @@ -367,8 +429,7 @@ func_get_code(PyFunctionObject *op, void *Py_UNUSED(ignored)) 
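The new function-watcher API in Objects/funcobject.c (PyFunction_AddWatcher(), PyFunction_ClearWatcher(), and the handle_func_event() calls threaded through the setters) lets an extension or debugger observe function creation, mutation, and destruction. Below is a sketch of a watcher under stated assumptions: the callback name, the counter, and the registration helper are hypothetical; the callback signature and the PyFunction_EVENT_* constants are the ones used in the hunks above.

#include "Python.h"

static Py_ssize_t n_functions_created = 0;

/* Watcher callback: invoked for every event dispatched by
 * handle_func_event().  A negative return value is reported via
 * PyErr_WriteUnraisable(), as in notify_func_watchers() above. */
static int
count_function_events(PyFunction_WatchEvent event, PyFunctionObject *func,
                      PyObject *new_value)
{
    if (event == PyFunction_EVENT_CREATE) {
        n_functions_created++;
    }
    return 0;
}

/* Registration returns a small integer ID, or -1 with RuntimeError set
 * when all watcher slots are taken; the same ID is later passed to
 * PyFunction_ClearWatcher() to unhook. */
static int
install_function_watcher(void)
{
    int watcher_id = PyFunction_AddWatcher(count_function_events);
    if (watcher_id < 0) {
        return -1;
    }
    /* ... later, e.g. at module teardown ... */
    return PyFunction_ClearWatcher(watcher_id);
}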
return NULL; } - Py_INCREF(op->func_code); - return op->func_code; + return Py_NewRef(op->func_code); } static int @@ -401,17 +462,16 @@ func_set_code(PyFunctionObject *op, PyObject *value, void *Py_UNUSED(ignored)) nclosure, nfree); return -1; } + handle_func_event(PyFunction_EVENT_MODIFY_CODE, op, value); op->func_version = 0; - Py_INCREF(value); - Py_XSETREF(op->func_code, value); + Py_XSETREF(op->func_code, Py_NewRef(value)); return 0; } static PyObject * func_get_name(PyFunctionObject *op, void *Py_UNUSED(ignored)) { - Py_INCREF(op->func_name); - return op->func_name; + return Py_NewRef(op->func_name); } static int @@ -424,16 +484,14 @@ func_set_name(PyFunctionObject *op, PyObject *value, void *Py_UNUSED(ignored)) "__name__ must be set to a string object"); return -1; } - Py_INCREF(value); - Py_XSETREF(op->func_name, value); + Py_XSETREF(op->func_name, Py_NewRef(value)); return 0; } static PyObject * func_get_qualname(PyFunctionObject *op, void *Py_UNUSED(ignored)) { - Py_INCREF(op->func_qualname); - return op->func_qualname; + return Py_NewRef(op->func_qualname); } static int @@ -446,8 +504,7 @@ func_set_qualname(PyFunctionObject *op, PyObject *value, void *Py_UNUSED(ignored "__qualname__ must be set to a string object"); return -1; } - Py_INCREF(value); - Py_XSETREF(op->func_qualname, value); + Py_XSETREF(op->func_qualname, Py_NewRef(value)); return 0; } @@ -460,8 +517,7 @@ func_get_defaults(PyFunctionObject *op, void *Py_UNUSED(ignored)) if (op->func_defaults == NULL) { Py_RETURN_NONE; } - Py_INCREF(op->func_defaults); - return op->func_defaults; + return Py_NewRef(op->func_defaults); } static int @@ -486,9 +542,9 @@ func_set_defaults(PyFunctionObject *op, PyObject *value, void *Py_UNUSED(ignored return -1; } + handle_func_event(PyFunction_EVENT_MODIFY_DEFAULTS, op, value); op->func_version = 0; - Py_XINCREF(value); - Py_XSETREF(op->func_defaults, value); + Py_XSETREF(op->func_defaults, Py_XNewRef(value)); return 0; } @@ -502,8 +558,7 @@ func_get_kwdefaults(PyFunctionObject *op, void *Py_UNUSED(ignored)) if (op->func_kwdefaults == NULL) { Py_RETURN_NONE; } - Py_INCREF(op->func_kwdefaults); - return op->func_kwdefaults; + return Py_NewRef(op->func_kwdefaults); } static int @@ -528,9 +583,9 @@ func_set_kwdefaults(PyFunctionObject *op, PyObject *value, void *Py_UNUSED(ignor return -1; } + handle_func_event(PyFunction_EVENT_MODIFY_KWDEFAULTS, op, value); op->func_version = 0; - Py_XINCREF(value); - Py_XSETREF(op->func_kwdefaults, value); + Py_XSETREF(op->func_kwdefaults, Py_XNewRef(value)); return 0; } @@ -543,10 +598,7 @@ func_get_annotations(PyFunctionObject *op, void *Py_UNUSED(ignored)) return NULL; } PyObject *d = func_get_annotation_dict(op); - if (d) { - Py_INCREF(d); - } - return d; + return Py_XNewRef(d); } static int @@ -563,8 +615,7 @@ func_set_annotations(PyFunctionObject *op, PyObject *value, void *Py_UNUSED(igno return -1; } op->func_version = 0; - Py_XINCREF(value); - Py_XSETREF(op->func_annotations, value); + Py_XSETREF(op->func_annotations, Py_XNewRef(value)); return 0; } @@ -674,16 +725,13 @@ func_new_impl(PyTypeObject *type, PyCodeObject *code, PyObject *globals, return NULL; } if (name != Py_None) { - Py_INCREF(name); - Py_SETREF(newfunc->func_name, name); + Py_SETREF(newfunc->func_name, Py_NewRef(name)); } if (defaults != Py_None) { - Py_INCREF(defaults); - newfunc->func_defaults = defaults; + newfunc->func_defaults = Py_NewRef(defaults); } if (closure != Py_None) { - Py_INCREF(closure); - newfunc->func_closure = closure; + newfunc->func_closure = 
Py_NewRef(closure); } return (PyObject *)newfunc; @@ -715,6 +763,7 @@ func_clear(PyFunctionObject *op) static void func_dealloc(PyFunctionObject *op) { + handle_func_event(PyFunction_EVENT_DESTROY, op, NULL); _PyObject_GC_UNTRACK(op); if (op->func_weakreflist != NULL) { PyObject_ClearWeakRefs((PyObject *) op); @@ -757,8 +806,7 @@ static PyObject * func_descr_get(PyObject *func, PyObject *obj, PyObject *type) { if (obj == Py_None || obj == NULL) { - Py_INCREF(func); - return func; + return Py_NewRef(func); } return PyMethod_New(func, obj); } @@ -927,8 +975,7 @@ cm_init(PyObject *self, PyObject *args, PyObject *kwds) return -1; if (!PyArg_UnpackTuple(args, "classmethod", 1, 1, &callable)) return -1; - Py_INCREF(callable); - Py_XSETREF(cm->cm_callable, callable); + Py_XSETREF(cm->cm_callable, Py_NewRef(callable)); if (functools_wraps((PyObject *)cm, cm->cm_callable) < 0) { return -1; @@ -1038,8 +1085,7 @@ PyClassMethod_New(PyObject *callable) classmethod *cm = (classmethod *) PyType_GenericAlloc(&PyClassMethod_Type, 0); if (cm != NULL) { - Py_INCREF(callable); - cm->cm_callable = callable; + cm->cm_callable = Py_NewRef(callable); } return (PyObject *)cm; } @@ -1104,8 +1150,7 @@ sm_descr_get(PyObject *self, PyObject *obj, PyObject *type) "uninitialized staticmethod object"); return NULL; } - Py_INCREF(sm->sm_callable); - return sm->sm_callable; + return Py_NewRef(sm->sm_callable); } static int @@ -1118,8 +1163,7 @@ sm_init(PyObject *self, PyObject *args, PyObject *kwds) return -1; if (!PyArg_UnpackTuple(args, "staticmethod", 1, 1, &callable)) return -1; - Py_INCREF(callable); - Py_XSETREF(sm->sm_callable, callable); + Py_XSETREF(sm->sm_callable, Py_NewRef(callable)); if (functools_wraps((PyObject *)sm, sm->sm_callable) < 0) { return -1; @@ -1234,8 +1278,7 @@ PyStaticMethod_New(PyObject *callable) staticmethod *sm = (staticmethod *) PyType_GenericAlloc(&PyStaticMethod_Type, 0); if (sm != NULL) { - Py_INCREF(callable); - sm->sm_callable = callable; + sm->sm_callable = Py_NewRef(callable); } return (PyObject *)sm; } diff --git a/Objects/genericaliasobject.c b/Objects/genericaliasobject.c index 77acd1bc57a568..675fd496eefdaf 100644 --- a/Objects/genericaliasobject.c +++ b/Objects/genericaliasobject.c @@ -183,8 +183,7 @@ static int tuple_add(PyObject *self, Py_ssize_t len, PyObject *item) { if (tuple_index(self, len, item) < 0) { - Py_INCREF(item); - PyTuple_SET_ITEM(self, len, item); + PyTuple_SET_ITEM(self, len, Py_NewRef(item)); return 1; } return 0; @@ -201,8 +200,7 @@ tuple_extend(PyObject **dst, Py_ssize_t dstindex, assert(dstindex + count <= PyTuple_GET_SIZE(*dst)); for (Py_ssize_t i = 0; i < count; ++i) { PyObject *item = src[i]; - Py_INCREF(item); - PyTuple_SET_ITEM(*dst, dstindex + i, item); + PyTuple_SET_ITEM(*dst, dstindex + i, Py_NewRef(item)); } return dstindex + count; } @@ -304,8 +302,7 @@ subs_tvars(PyObject *obj, PyObject *params, continue; } } - Py_INCREF(arg); - PyTuple_SET_ITEM(subargs, j, arg); + PyTuple_SET_ITEM(subargs, j, Py_NewRef(arg)); j++; } assert(j == PyTuple_GET_SIZE(subargs)); @@ -347,8 +344,7 @@ _unpacked_tuple_args(PyObject *arg) ((gaobject *)arg)->origin == (PyObject *)&PyTuple_Type) { result = ((gaobject *)arg)->args; - Py_INCREF(result); - return result; + return Py_NewRef(result); } if (_PyObject_LookupAttr(arg, &_Py_ID(__typing_unpacked_tuple_args__), &result) > 0) { @@ -459,8 +455,7 @@ _Py_subs_parameters(PyObject *self, PyObject *args, PyObject *parameters, PyObje for (Py_ssize_t iarg = 0, jarg = 0; iarg < nargs; iarg++) { PyObject *arg = 
PyTuple_GET_ITEM(args, iarg); if (PyType_Check(arg)) { - Py_INCREF(arg); - PyTuple_SET_ITEM(newargs, jarg, arg); + PyTuple_SET_ITEM(newargs, jarg, Py_NewRef(arg)); jarg++; continue; } @@ -767,8 +762,7 @@ ga_parameters(PyObject *self, void *unused) return NULL; } } - Py_INCREF(alias->parameters); - return alias->parameters; + return Py_NewRef(alias->parameters); } static PyObject * @@ -776,8 +770,7 @@ ga_unpacked_tuple_args(PyObject *self, void *unused) { gaobject *alias = (gaobject *)self; if (alias->starred && alias->origin == (PyObject *)&PyTuple_Type) { - Py_INCREF(alias->args); - return alias->args; + return Py_NewRef(alias->args); } Py_RETURN_NONE; } @@ -803,8 +796,7 @@ setup_ga(gaobject *alias, PyObject *origin, PyObject *args) { Py_INCREF(args); } - Py_INCREF(origin); - alias->origin = origin; + alias->origin = Py_NewRef(origin); alias->args = args; alias->parameters = NULL; alias->weakreflist = NULL; diff --git a/Objects/genobject.c b/Objects/genobject.c index ad4fbed6d8d579..ac82fa79ef86ca 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -8,7 +8,6 @@ #include "pycore_frame.h" // _PyInterpreterFrame #include "pycore_genobject.h" // struct _Py_async_gen_state #include "pycore_object.h" // _PyObject_GC_UNTRACK() -#include "pycore_opcode.h" // _PyOpcode_Deopt #include "pycore_pyerrors.h" // _PyErr_ClearExcState() #include "pycore_pystate.h" // _PyThreadState_GET() #include "structmember.h" // PyMemberDef @@ -195,8 +194,7 @@ gen_send_ex2(PyGenObject *gen, PyObject *arg, PyObject **presult, else if (arg && !exc) { /* `gen` is an exhausted generator: only return value if called from send(). */ - *presult = Py_None; - Py_INCREF(*presult); + *presult = Py_NewRef(Py_None); return PYGEN_RETURN; } return PYGEN_ERROR; @@ -205,12 +203,10 @@ gen_send_ex2(PyGenObject *gen, PyObject *arg, PyObject **presult, assert(gen->gi_frame_state < FRAME_EXECUTING); /* Push arg onto the frame's value stack */ result = arg ? arg : Py_None; - Py_INCREF(result); - _PyFrame_StackPush(frame, result); + _PyFrame_StackPush(frame, Py_NewRef(result)); - frame->previous = tstate->cframe->current_frame; - - gen->gi_exc_state.previous_item = tstate->exc_info; + _PyErr_StackItem *prev_exc_info = tstate->exc_info; + gen->gi_exc_state.previous_item = prev_exc_info; tstate->exc_info = &gen->gi_exc_state; if (exc) { @@ -221,17 +217,10 @@ gen_send_ex2(PyGenObject *gen, PyObject *arg, PyObject **presult, gen->gi_frame_state = FRAME_EXECUTING; EVAL_CALL_STAT_INC(EVAL_CALL_GENERATOR); result = _PyEval_EvalFrame(tstate, frame, exc); - if (gen->gi_frame_state == FRAME_EXECUTING) { - gen->gi_frame_state = FRAME_COMPLETED; - } - tstate->exc_info = gen->gi_exc_state.previous_item; - gen->gi_exc_state.previous_item = NULL; - - assert(tstate->cframe->current_frame == frame->previous); - /* Don't keep the reference to previous any longer than necessary. It - * may keep a chain of frames alive or it could create a reference - * cycle. */ - frame->previous = NULL; + assert(tstate->exc_info == prev_exc_info); + assert(gen->gi_exc_state.previous_item == NULL); + assert(gen->gi_frame_state != FRAME_EXECUTING); + assert(frame->previous == NULL); /* If the generator just returned (as opposed to yielding), signal * that the generator is exhausted. 
*/ @@ -247,33 +236,16 @@ gen_send_ex2(PyGenObject *gen, PyObject *arg, PyObject **presult, } } else { - if (PyErr_ExceptionMatches(PyExc_StopIteration)) { - const char *msg = "generator raised StopIteration"; - if (PyCoro_CheckExact(gen)) { - msg = "coroutine raised StopIteration"; - } - else if (PyAsyncGen_CheckExact(gen)) { - msg = "async generator raised StopIteration"; - } - _PyErr_FormatFromCause(PyExc_RuntimeError, "%s", msg); - } - else if (PyAsyncGen_CheckExact(gen) && - PyErr_ExceptionMatches(PyExc_StopAsyncIteration)) - { - /* code in `gen` raised a StopAsyncIteration error: - raise a RuntimeError. - */ - const char *msg = "async generator raised StopAsyncIteration"; - _PyErr_FormatFromCause(PyExc_RuntimeError, "%s", msg); - } + assert(!PyErr_ExceptionMatches(PyExc_StopIteration)); + assert(!PyAsyncGen_CheckExact(gen) || + !PyErr_ExceptionMatches(PyExc_StopAsyncIteration)); } /* generator can't be rerun, so release the frame */ /* first clean reference cycle through stored exception traceback */ _PyErr_ClearExcState(&gen->gi_exc_state); - gen->gi_frame_state = FRAME_CLEARED; - _PyFrame_Clear(frame); + assert(gen->gi_frame_state == FRAME_CLEARED); *presult = result; return result ? PYGEN_RETURN : PYGEN_ERROR; } @@ -363,14 +335,13 @@ _PyGen_yf(PyGenObject *gen) assert(_Py_OPCODE(_PyCode_CODE(gen->gi_code)[0]) != SEND); return NULL; } - _Py_CODEUNIT next = frame->prev_instr[1]; - if (_PyOpcode_Deopt[_Py_OPCODE(next)] != RESUME || _Py_OPARG(next) < 2) + _Py_CODEUNIT next = frame->prev_instr[OPSIZE]; + if (_Py_OPCODE(next) != RESUME || _Py_OPARG(next) < 2) { /* Not in a yield from */ return NULL; } - yf = _PyFrame_StackPeek(frame); - Py_INCREF(yf); + yf = Py_NewRef(_PyFrame_StackPeek(frame)); } return yf; @@ -520,10 +491,8 @@ _gen_throw(PyGenObject *gen, int close_on_genexit, } else { /* Normalize to raise <class>, <instance> */ - Py_XDECREF(val); - val = typ; - typ = PyExceptionInstance_Class(typ); - Py_INCREF(typ); + Py_XSETREF(val, typ); + typ = Py_NewRef(PyExceptionInstance_Class(typ)); if (tb == NULL) /* Returns NULL if there's no traceback */ @@ -651,8 +620,7 @@ _PyGen_FetchStopIterationValue(PyObject **pvalue) if (ev) { /* exception will usually be normalised already */ if (PyObject_TypeCheck(ev, (PyTypeObject *) et)) { - value = ((PyStopIterationObject *)ev)->value; - Py_INCREF(value); + value = Py_NewRef(((PyStopIterationObject *)ev)->value); Py_DECREF(ev); } else if (et == PyExc_StopIteration && !PyTuple_Check(ev)) { /* Avoid normalisation and take ev as value. 
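Much of the mechanical churn in these files is the replacement of the two-step "Py_INCREF(x); dst = x;" pattern with Py_NewRef(), and of Py_XINCREF() with the NULL-tolerant Py_XNewRef(). For readers new to the idiom, a small self-contained sketch; the example_t struct is purely illustrative and not part of the patch.

#include "Python.h"

typedef struct {
    PyObject *value;        /* strong reference */
    PyObject *maybe_value;  /* strong reference or NULL */
} example_t;

static void
store(example_t *dst, PyObject *item, PyObject *maybe_item)
{
    /* Previously written as:
     *     Py_INCREF(item);
     *     dst->value = item;
     * Py_NewRef() increments the refcount and returns its argument, so
     * taking ownership of a borrowed reference reads as one expression. */
    dst->value = Py_NewRef(item);

    /* Py_XNewRef() accepts NULL, mirroring Py_XINCREF(). */
    dst->maybe_value = Py_XNewRef(maybe_item);
}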
@@ -671,8 +639,7 @@ _PyGen_FetchStopIterationValue(PyObject **pvalue) PyErr_Restore(et, ev, tb); return -1; } - value = ((PyStopIterationObject *)ev)->value; - Py_INCREF(value); + value = Py_NewRef(((PyStopIterationObject *)ev)->value); Py_DECREF(ev); } } @@ -682,8 +649,7 @@ _PyGen_FetchStopIterationValue(PyObject **pvalue) return -1; } if (value == NULL) { - value = Py_None; - Py_INCREF(value); + value = Py_NewRef(Py_None); } *pvalue = value; return 0; @@ -699,8 +665,7 @@ gen_repr(PyGenObject *gen) static PyObject * gen_get_name(PyGenObject *op, void *Py_UNUSED(ignored)) { - Py_INCREF(op->gi_name); - return op->gi_name; + return Py_NewRef(op->gi_name); } static int @@ -713,16 +678,14 @@ gen_set_name(PyGenObject *op, PyObject *value, void *Py_UNUSED(ignored)) "__name__ must be set to a string object"); return -1; } - Py_INCREF(value); - Py_XSETREF(op->gi_name, value); + Py_XSETREF(op->gi_name, Py_NewRef(value)); return 0; } static PyObject * gen_get_qualname(PyGenObject *op, void *Py_UNUSED(ignored)) { - Py_INCREF(op->gi_qualname); - return op->gi_qualname; + return Py_NewRef(op->gi_qualname); } static int @@ -735,8 +698,7 @@ gen_set_qualname(PyGenObject *op, PyObject *value, void *Py_UNUSED(ignored)) "__qualname__ must be set to a string object"); return -1; } - Py_INCREF(value); - Py_XSETREF(op->gi_qualname, value); + Py_XSETREF(op->gi_qualname, Py_NewRef(value)); return 0; } @@ -807,7 +769,8 @@ gen_sizeof(PyGenObject *gen, PyObject *Py_UNUSED(ignored)) Py_ssize_t res; res = offsetof(PyGenObject, gi_iframe) + offsetof(_PyInterpreterFrame, localsplus); PyCodeObject *code = gen->gi_code; - res += (code->co_nlocalsplus+code->co_stacksize) * sizeof(PyObject *); + int nconsts = (int)PyTuple_Size(code->co_consts); + res += (code->co_nlocalsplus + code->co_stacksize + nconsts) * sizeof(PyObject *); return PyLong_FromSsize_t(res); } @@ -888,14 +851,14 @@ static PyObject * make_gen(PyTypeObject *type, PyFunctionObject *func) { PyCodeObject *code = (PyCodeObject *)func->func_code; - int slots = code->co_nlocalsplus + code->co_stacksize; + int nconsts = (int)PyTuple_Size(code->co_consts); + int slots = code->co_nlocalsplus + code->co_stacksize + nconsts; PyGenObject *gen = PyObject_GC_NewVar(PyGenObject, type, slots); if (gen == NULL) { return NULL; } gen->gi_frame_state = FRAME_CLEARED; - gen->gi_code = (PyCodeObject *)func->func_code; - Py_INCREF(gen->gi_code); + gen->gi_code = (PyCodeObject *)Py_NewRef(func->func_code); gen->gi_weakreflist = NULL; gen->gi_exc_state.exc_value = NULL; gen->gi_exc_state.previous_item = NULL; @@ -981,15 +944,13 @@ gen_new_with_qualname(PyTypeObject *type, PyFrameObject *f, gen->gi_exc_state.exc_value = NULL; gen->gi_exc_state.previous_item = NULL; if (name != NULL) - gen->gi_name = name; + gen->gi_name = Py_NewRef(name); else - gen->gi_name = gen->gi_code->co_name; - Py_INCREF(gen->gi_name); + gen->gi_name = Py_NewRef(gen->gi_code->co_name); if (qualname != NULL) - gen->gi_qualname = qualname; + gen->gi_qualname = Py_NewRef(qualname); else - gen->gi_qualname = gen->gi_code->co_qualname; - Py_INCREF(gen->gi_qualname); + gen->gi_qualname = Py_NewRef(gen->gi_code->co_qualname); _PyObject_GC_TRACK(gen); return (PyObject *)gen; } @@ -1041,8 +1002,7 @@ _PyCoro_GetAwaitableIter(PyObject *o) if (PyCoro_CheckExact(o) || gen_is_coroutine(o)) { /* 'o' is a coroutine. 
*/ - Py_INCREF(o); - return o; + return Py_NewRef(o); } ot = Py_TYPE(o); @@ -1089,8 +1049,7 @@ coro_await(PyCoroObject *coro) if (cw == NULL) { return NULL; } - Py_INCREF(coro); - cw->cw_coroutine = coro; + cw->cw_coroutine = (PyCoroObject*)Py_NewRef(coro); _PyObject_GC_TRACK(cw); return (PyObject *)cw; } @@ -1460,8 +1419,7 @@ async_gen_init_hooks(PyAsyncGenObject *o) finalizer = tstate->async_gen_finalizer; if (finalizer) { - Py_INCREF(finalizer); - o->ag_origin_or_finalizer = finalizer; + o->ag_origin_or_finalizer = Py_NewRef(finalizer); } firstiter = tstate->async_gen_firstiter; @@ -1923,11 +1881,9 @@ async_gen_asend_new(PyAsyncGenObject *gen, PyObject *sendval) } } - Py_INCREF(gen); - o->ags_gen = gen; + o->ags_gen = (PyAsyncGenObject*)Py_NewRef(gen); - Py_XINCREF(sendval); - o->ags_sendval = sendval; + o->ags_sendval = Py_XNewRef(sendval); o->ags_state = AWAITABLE_STATE_INIT; @@ -2043,8 +1999,7 @@ _PyAsyncGenValueWrapperNew(PyObject *val) return NULL; } } - o->agw_val = val; - Py_INCREF(val); + o->agw_val = Py_NewRef(val); _PyObject_GC_TRACK((PyObject*)o); return (PyObject*)o; } @@ -2327,11 +2282,9 @@ async_gen_athrow_new(PyAsyncGenObject *gen, PyObject *args) if (o == NULL) { return NULL; } - o->agt_gen = gen; - o->agt_args = args; + o->agt_gen = (PyAsyncGenObject*)Py_NewRef(gen); + o->agt_args = Py_XNewRef(args); o->agt_state = AWAITABLE_STATE_INIT; - Py_INCREF(gen); - Py_XINCREF(args); _PyObject_GC_TRACK((PyObject*)o); return (PyObject*)o; } diff --git a/Objects/iterobject.c b/Objects/iterobject.c index 62c36146d64f69..cfd6d0a7c959c9 100644 --- a/Objects/iterobject.c +++ b/Objects/iterobject.c @@ -23,8 +23,7 @@ PySeqIter_New(PyObject *seq) if (it == NULL) return NULL; it->it_index = 0; - Py_INCREF(seq); - it->it_seq = seq; + it->it_seq = Py_NewRef(seq); _PyObject_GC_TRACK(it); return (PyObject *)it; } @@ -183,10 +182,8 @@ PyCallIter_New(PyObject *callable, PyObject *sentinel) it = PyObject_GC_New(calliterobject, &PyCallIter_Type); if (it == NULL) return NULL; - Py_INCREF(callable); - it->it_callable = callable; - Py_INCREF(sentinel); - it->it_sentinel = sentinel; + it->it_callable = Py_NewRef(callable); + it->it_sentinel = Py_NewRef(sentinel); _PyObject_GC_TRACK(it); return (PyObject *)it; } @@ -496,10 +493,8 @@ PyAnextAwaitable_New(PyObject *awaitable, PyObject *default_value) if (anext == NULL) { return NULL; } - Py_INCREF(awaitable); - anext->wrapped = awaitable; - Py_INCREF(default_value); - anext->default_value = default_value; + anext->wrapped = Py_NewRef(awaitable); + anext->default_value = Py_NewRef(default_value); _PyObject_GC_TRACK(anext); return (PyObject *)anext; } diff --git a/Objects/listobject.c b/Objects/listobject.c index 2cb3de9f147cc7..1d32915b17a14b 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -299,8 +299,7 @@ ins1(PyListObject *self, Py_ssize_t where, PyObject *v) items = self->ob_item; for (i = n; --i >= where; ) items[i+1] = items[i]; - Py_INCREF(v); - items[where] = v; + items[where] = Py_NewRef(v); return 0; } @@ -332,8 +331,7 @@ int PyList_Append(PyObject *op, PyObject *newitem) { if (PyList_Check(op) && (newitem != NULL)) { - Py_INCREF(newitem); - return _PyList_AppendTakeRef((PyListObject *)op, newitem); + return _PyList_AppendTakeRef((PyListObject *)op, Py_NewRef(newitem)); } PyErr_BadInternalCall(); return -1; @@ -461,8 +459,7 @@ list_item(PyListObject *a, Py_ssize_t i) PyErr_SetObject(PyExc_IndexError, &_Py_STR(list_err)); return NULL; } - Py_INCREF(a->ob_item[i]); - return a->ob_item[i]; + return Py_NewRef(a->ob_item[i]); } static 
PyObject * @@ -483,8 +480,7 @@ list_slice(PyListObject *a, Py_ssize_t ilow, Py_ssize_t ihigh) dest = np->ob_item; for (i = 0; i < len; i++) { PyObject *v = src[i]; - Py_INCREF(v); - dest[i] = v; + dest[i] = Py_NewRef(v); } Py_SET_SIZE(np, len); return (PyObject *)np; @@ -539,15 +535,13 @@ list_concat(PyListObject *a, PyObject *bb) dest = np->ob_item; for (i = 0; i < Py_SIZE(a); i++) { PyObject *v = src[i]; - Py_INCREF(v); - dest[i] = v; + dest[i] = Py_NewRef(v); } src = b->ob_item; dest = np->ob_item + Py_SIZE(a); for (i = 0; i < Py_SIZE(b); i++) { PyObject *v = src[i]; - Py_INCREF(v); - dest[i] = v; + dest[i] = Py_NewRef(v); } Py_SET_SIZE(np, size); return (PyObject *)np; @@ -716,8 +710,7 @@ list_ass_slice(PyListObject *a, Py_ssize_t ilow, Py_ssize_t ihigh, PyObject *v) } for (k = 0; k < n; k++, ilow++) { PyObject *w = vitem[k]; - Py_XINCREF(w); - item[ilow] = w; + item[ilow] = Py_XNewRef(w); } for (k = norig - 1; k >= 0; --k) Py_XDECREF(recycle[k]); @@ -745,14 +738,12 @@ list_inplace_repeat(PyListObject *self, Py_ssize_t n) { Py_ssize_t input_size = PyList_GET_SIZE(self); if (input_size == 0 || n == 1) { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } if (n < 1) { (void)_list_clear(self); - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } if (input_size > PY_SSIZE_T_MAX / n) { @@ -770,8 +761,7 @@ list_inplace_repeat(PyListObject *self, Py_ssize_t n) _Py_memory_repeat((char *)items, sizeof(PyObject *)*output_size, sizeof(PyObject *)*input_size); - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } static int @@ -784,8 +774,7 @@ list_ass_item(PyListObject *a, Py_ssize_t i, PyObject *v) } if (v == NULL) return list_ass_slice(a, i, i+1, v); - Py_INCREF(v); - Py_SETREF(a->ob_item[i], v); + Py_SETREF(a->ob_item[i], Py_NewRef(v)); return 0; } @@ -913,8 +902,7 @@ list_extend(PyListObject *self, PyObject *iterable) dest = self->ob_item + m; for (i = 0; i < n; i++) { PyObject *o = src[i]; - Py_INCREF(o); - dest[i] = o; + dest[i] = Py_NewRef(o); } Py_DECREF(iterable); Py_RETURN_NONE; @@ -1002,8 +990,7 @@ list_inplace_concat(PyListObject *self, PyObject *other) if (result == NULL) return result; Py_DECREF(result); - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } /*[clinic input] @@ -2240,7 +2227,7 @@ list.sort * key as keyfunc: object = None - reverse: bool(accept={int}) = False + reverse: bool = False Sort the list in ascending order and return None. @@ -2255,7 +2242,7 @@ The reverse flag can be set to sort in descending order. 
static PyObject * list_sort_impl(PyListObject *self, PyObject *keyfunc, int reverse) -/*[clinic end generated code: output=57b9f9c5e23fbe42 input=cb56cd179a713060]*/ +/*[clinic end generated code: output=57b9f9c5e23fbe42 input=a74c4cd3ec6b5c08]*/ { MergeState ms; Py_ssize_t nremaining; @@ -2512,8 +2499,7 @@ list_sort_impl(PyListObject *self, PyObject *keyfunc, int reverse) } PyMem_Free(final_ob_item); } - Py_XINCREF(result); - return result; + return Py_XNewRef(result); } #undef IFLT #undef ISLT @@ -2820,10 +2806,9 @@ static PyObject * list___sizeof___impl(PyListObject *self) /*[clinic end generated code: output=3417541f95f9a53e input=b8030a5d5ce8a187]*/ { - Py_ssize_t res; - - res = _PyObject_SIZE(Py_TYPE(self)) + self->allocated * sizeof(void*); - return PyLong_FromSsize_t(res); + size_t res = _PyObject_SIZE(Py_TYPE(self)); + res += (size_t)self->allocated * sizeof(void*); + return PyLong_FromSize_t(res); } static PyObject *list_iter(PyObject *seq); @@ -2901,8 +2886,7 @@ list_subscript(PyListObject* self, PyObject* item) dest = ((PyListObject *)result)->ob_item; for (cur = start, i = 0; i < slicelength; cur += (size_t)step, i++) { - it = src[cur]; - Py_INCREF(it); + it = Py_NewRef(src[cur]); dest[i] = it; } Py_SET_SIZE(result, slicelength); @@ -3057,8 +3041,7 @@ list_ass_subscript(PyListObject* self, PyObject* item, PyObject* value) for (cur = start, i = 0; i < slicelength; cur += (size_t)step, i++) { garbage[i] = selfitems[cur]; - ins = seqitems[i]; - Py_INCREF(ins); + ins = Py_NewRef(seqitems[i]); selfitems[cur] = ins; } @@ -3199,8 +3182,7 @@ list_iter(PyObject *seq) if (it == NULL) return NULL; it->it_index = 0; - Py_INCREF(seq); - it->it_seq = (PyListObject *)seq; + it->it_seq = (PyListObject *)Py_NewRef(seq); _PyObject_GC_TRACK(it); return (PyObject *)it; } @@ -3235,8 +3217,7 @@ listiter_next(_PyListIterObject *it) if (it->it_index < PyList_GET_SIZE(seq)) { item = PyList_GET_ITEM(seq, it->it_index); ++it->it_index; - Py_INCREF(item); - return item; + return Py_NewRef(item); } it->it_seq = NULL; @@ -3350,8 +3331,7 @@ list___reversed___impl(PyListObject *self) return NULL; assert(PyList_Check(self)); it->it_index = PyList_GET_SIZE(self) - 1; - Py_INCREF(self); - it->it_seq = self; + it->it_seq = (PyListObject*)Py_NewRef(self); PyObject_GC_Track(it); return (PyObject *)it; } @@ -3389,8 +3369,7 @@ listreviter_next(listreviterobject *it) if (index>=0 && index < PyList_GET_SIZE(seq)) { item = PyList_GET_ITEM(seq, index); it->it_index--; - Py_INCREF(item); - return item; + return Py_NewRef(item); } it->it_index = -1; it->it_seq = NULL; diff --git a/Objects/longobject.c b/Objects/longobject.c index cf859ccb970730..8596ce9797b5a6 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -36,8 +36,8 @@ medium_value(PyLongObject *x) #define IS_SMALL_INT(ival) (-_PY_NSMALLNEGINTS <= (ival) && (ival) < _PY_NSMALLPOSINTS) #define IS_SMALL_UINT(ival) ((ival) < _PY_NSMALLPOSINTS) -#define _MAX_STR_DIGITS_ERROR_FMT_TO_INT "Exceeds the limit (%d) for integer string conversion: value has %zd digits; use sys.set_int_max_str_digits() to increase the limit" -#define _MAX_STR_DIGITS_ERROR_FMT_TO_STR "Exceeds the limit (%d) for integer string conversion; use sys.set_int_max_str_digits() to increase the limit" +#define _MAX_STR_DIGITS_ERROR_FMT_TO_INT "Exceeds the limit (%d digits) for integer string conversion: value has %zd digits; use sys.set_int_max_str_digits() to increase the limit" +#define _MAX_STR_DIGITS_ERROR_FMT_TO_STR "Exceeds the limit (%d digits) for integer string conversion; use 
sys.set_int_max_str_digits() to increase the limit" /* If defined, use algorithms from the _pylong.py module */ #define WITH_PYLONG_MODULE 1 @@ -62,8 +62,7 @@ get_small_int(sdigit ival) { assert(IS_SMALL_INT(ival)); PyObject *v = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + ival]; - Py_INCREF(v); - return v; + return Py_NewRef(v); } static PyLongObject * @@ -1753,7 +1752,11 @@ pylong_int_to_decimal_string(PyObject *aa, if (s == NULL) { goto error; } - assert(PyUnicode_Check(s)); + if (!PyUnicode_Check(s)) { + PyErr_SetString(PyExc_TypeError, + "_pylong.int_to_decimal_string did not return a str"); + goto error; + } if (writer) { Py_ssize_t size = PyUnicode_GET_LENGTH(s); if (_PyUnicodeWriter_Prepare(writer, size, '9') == -1) { @@ -1781,8 +1784,7 @@ pylong_int_to_decimal_string(PyObject *aa, goto success; } else { - *p_output = (PyObject *)s; - Py_INCREF(s); + *p_output = Py_NewRef(s); goto success; } @@ -2362,6 +2364,7 @@ pylong_int_from_string(const char *start, const char *end, PyLongObject **res) } PyObject *s = PyUnicode_FromStringAndSize(start, end-start); if (s == NULL) { + Py_DECREF(mod); goto error; } PyObject *result = PyObject_CallMethod(mod, "int_from_string", "O", s); @@ -2371,14 +2374,16 @@ pylong_int_from_string(const char *start, const char *end, PyLongObject **res) goto error; } if (!PyLong_Check(result)) { - PyErr_SetString(PyExc_TypeError, "an integer is required"); + Py_DECREF(result); + PyErr_SetString(PyExc_TypeError, + "_pylong.int_from_string did not return an int"); goto error; } *res = (PyLongObject *)result; return 0; error: *res = NULL; - return 0; + return 0; // See the long_from_string_base() API comment. } #endif /* WITH_PYLONG_MODULE */ @@ -2593,8 +2598,7 @@ long_from_non_binary_base(const char *start, const char *end, Py_ssize_t digits, memcpy(tmp->ob_digit, z->ob_digit, sizeof(digit) * size_z); - Py_DECREF(z); - z = tmp; + Py_SETREF(z, tmp); z->ob_digit[size_z] = (digit)c; ++size_z; } @@ -2617,7 +2621,8 @@ long_from_non_binary_base(const char *start, const char *end, Py_ssize_t digits, * Return values: * * - Returns -1 on syntax error (exception needs to be set, *res is untouched) - * - Returns 0 and sets *res to NULL for MemoryError/OverflowError. + * - Returns 0 and sets *res to NULL for MemoryError, OverflowError, or + * _pylong.int_from_string() errors. * - Returns 0 and sets *res to an unsigned, unnormalized PyLong (success!). * * Afterwards *str is set to point to the first non-digit (which may be *str!). 
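The longobject.c hunks that follow (and several above) also convert the "Py_DECREF(old); old = new;" sequence to Py_SETREF(), which stores the replacement before releasing the old reference, so the variable never points at freed memory while the old object's deallocation runs. A minimal sketch, assuming a hypothetical module-level cache slot:

#include "Python.h"

static PyObject *cached = NULL;   /* owns one strong reference, or NULL */

/* Takes over (steals) the caller's reference to new_value. */
static void
replace_cached(PyObject *new_value)
{
    /* Equivalent to the removed pattern
     *     Py_XDECREF(cached);
     *     cached = new_value;
     * except that the assignment happens before the old value is
     * released.  Py_SETREF() is the variant for slots known to be
     * non-NULL. */
    Py_XSETREF(cached, new_value);
}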
@@ -2903,8 +2908,7 @@ long_divrem(PyLongObject *a, PyLongObject *b, return -1; } PyObject *zero = _PyLong_GetZero(); - Py_INCREF(zero); - *pdiv = (PyLongObject*)zero; + *pdiv = (PyLongObject*)Py_NewRef(zero); return 0; } if (size_b == 1) { @@ -3739,10 +3743,8 @@ k_mul(PyLongObject *a, PyLongObject *b) assert(Py_SIZE(ah) > 0); /* the split isn't degenerate */ if (a == b) { - bh = ah; - bl = al; - Py_INCREF(bh); - Py_INCREF(bl); + bh = (PyLongObject*)Py_NewRef(ah); + bl = (PyLongObject*)Py_NewRef(al); } else if (kmul_split(b, shift, &bh, &bl) < 0) goto fail; @@ -3814,8 +3816,7 @@ k_mul(PyLongObject *a, PyLongObject *b) ah = al = NULL; if (a == b) { - t2 = t1; - Py_INCREF(t2); + t2 = (PyLongObject*)Py_NewRef(t1); } else if ((t2 = x_add(bh, bl)) == NULL) { Py_DECREF(t1); @@ -4059,12 +4060,10 @@ pylong_int_divmod(PyLongObject *v, PyLongObject *w, return -1; } if (pdiv != NULL) { - Py_INCREF(q); - *pdiv = (PyLongObject *)q; + *pdiv = (PyLongObject *)Py_NewRef(q); } if (pmod != NULL) { - Py_INCREF(r); - *pmod = (PyLongObject *)r; + *pmod = (PyLongObject *)Py_NewRef(r); } Py_DECREF(result); return 0; @@ -4140,8 +4139,7 @@ l_divmod(PyLongObject *v, PyLongObject *w, (Py_SIZE(mod) > 0 && Py_SIZE(w) < 0)) { PyLongObject *temp; temp = (PyLongObject *) long_add(mod, w); - Py_DECREF(mod); - mod = temp; + Py_SETREF(mod, temp); if (mod == NULL) { Py_DECREF(div); return -1; @@ -4152,8 +4150,7 @@ l_divmod(PyLongObject *v, PyLongObject *w, Py_DECREF(div); return -1; } - Py_DECREF(div); - div = temp; + Py_SETREF(div, temp); } if (pdiv != NULL) *pdiv = div; @@ -4189,8 +4186,7 @@ l_mod(PyLongObject *v, PyLongObject *w, PyLongObject **pmod) (Py_SIZE(mod) > 0 && Py_SIZE(w) < 0)) { PyLongObject *temp; temp = (PyLongObject *) long_add(mod, w); - Py_DECREF(mod); - mod = temp; + Py_SETREF(mod, temp); if (mod == NULL) return -1; } @@ -4430,8 +4426,7 @@ long_true_divide(PyObject *v, PyObject *w) else { PyLongObject *div, *rem; div = x_divrem(x, b, &rem); - Py_DECREF(x); - x = div; + Py_SETREF(x, div); if (x == NULL) goto error; if (Py_SIZE(rem)) @@ -4561,8 +4556,7 @@ long_invmod(PyLongObject *a, PyLongObject *n) if (l_divmod(a, n, &q, &r) == -1) { goto Error; } - Py_DECREF(a); - a = n; + Py_SETREF(a, n); n = r; t = (PyLongObject *)long_mul(q, c); Py_DECREF(q); @@ -4574,8 +4568,7 @@ long_invmod(PyLongObject *a, PyLongObject *n) if (s == NULL) { goto Error; } - Py_DECREF(b); - b = c; + Py_SETREF(b, c); c = s; } /* references now owned: a, b, c, n */ @@ -4620,7 +4613,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) /* k-ary values. If the exponent is large enough, table is * precomputed so that table[i] == a**(2*i+1) % c for i in * range(EXP_TABLE_LEN). - * Note: this is uninitialzed stack trash: don't pay to set it to known + * Note: this is uninitialized stack trash: don't pay to set it to known * values unless it's needed. Instead ensure that num_table_entries is * set to the number of entries actually filled whenever a branch to the * Error or Done labels is possible. 
@@ -4630,11 +4623,10 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) /* a, b, c = v, w, x */ CHECK_BINOP(v, w); - a = (PyLongObject*)v; Py_INCREF(a); - b = (PyLongObject*)w; Py_INCREF(b); + a = (PyLongObject*)Py_NewRef(v); + b = (PyLongObject*)Py_NewRef(w); if (PyLong_Check(x)) { - c = (PyLongObject *)x; - Py_INCREF(x); + c = (PyLongObject *)Py_NewRef(x); } else if (x == Py_None) c = NULL; @@ -4671,8 +4663,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) temp = (PyLongObject *)_PyLong_Copy(c); if (temp == NULL) goto Error; - Py_DECREF(c); - c = temp; + Py_SETREF(c, temp); temp = NULL; _PyLong_Negate(&c); if (c == NULL) @@ -4692,8 +4683,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) temp = (PyLongObject *)_PyLong_Copy(b); if (temp == NULL) goto Error; - Py_DECREF(b); - b = temp; + Py_SETREF(b, temp); temp = NULL; _PyLong_Negate(&b); if (b == NULL) @@ -4702,8 +4692,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) temp = long_invmod(a, c); if (temp == NULL) goto Error; - Py_DECREF(a); - a = temp; + Py_SETREF(a, temp); temp = NULL; } @@ -4719,8 +4708,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) if (Py_SIZE(a) < 0 || Py_SIZE(a) > Py_SIZE(c)) { if (l_mod(a, c, &temp) < 0) goto Error; - Py_DECREF(a); - a = temp; + Py_SETREF(a, temp); temp = NULL; } } @@ -4787,9 +4775,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) * because we're primarily trying to cut overhead for small powers. */ assert(bi); /* else there is no significant bit */ - Py_INCREF(a); - Py_DECREF(z); - z = a; + Py_SETREF(z, (PyLongObject*)Py_NewRef(a)); for (bit = 2; ; bit <<= 1) { if (bit > bi) { /* found the first bit */ assert((bi & bit) == 0); @@ -4816,8 +4802,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) /* Left-to-right k-ary sliding window exponentiation * (Handbook of Applied Cryptography (HAC) Algorithm 14.85) */ - Py_INCREF(a); - table[0] = a; + table[0] = (PyLongObject*)Py_NewRef(a); num_table_entries = 1; MULT(a, a, a2); /* table[i] == a**(2*i + 1) % c */ @@ -4876,8 +4861,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) temp = (PyLongObject *)long_sub(z, c); if (temp == NULL) goto Error; - Py_DECREF(z); - z = temp; + Py_SETREF(z, temp); temp = NULL; } goto Done; @@ -5354,11 +5338,12 @@ long_or(PyObject *a, PyObject *b) static PyObject * long_long(PyObject *v) { - if (PyLong_CheckExact(v)) - Py_INCREF(v); - else - v = _PyLong_Copy((PyLongObject *)v); - return v; + if (PyLong_CheckExact(v)) { + return Py_NewRef(v); + } + else { + return _PyLong_Copy((PyLongObject *)v); + } } PyObject * @@ -5445,8 +5430,7 @@ _PyLong_GCD(PyObject *aarg, PyObject *barg) /* no progress; do a Euclidean step */ if (l_mod(a, b, &r) < 0) goto error; - Py_DECREF(a); - a = b; + Py_SETREF(a, b); b = r; alloc_a = alloc_b; alloc_b = Py_SIZE(b); @@ -5465,8 +5449,7 @@ _PyLong_GCD(PyObject *aarg, PyObject *barg) Py_SET_SIZE(c, size_a); } else if (Py_REFCNT(a) == 1) { - Py_INCREF(a); - c = a; + c = (PyLongObject*)Py_NewRef(a); } else { alloc_a = size_a; @@ -5479,8 +5462,7 @@ _PyLong_GCD(PyObject *aarg, PyObject *barg) Py_SET_SIZE(d, size_a); } else if (Py_REFCNT(b) == 1 && size_a <= alloc_b) { - Py_INCREF(b); - d = b; + d = (PyLongObject*)Py_NewRef(b); Py_SET_SIZE(d, size_a); } else { @@ -5769,8 +5751,7 @@ _PyLong_DivmodNear(PyObject *a, PyObject *b) goto error; if (quo_is_neg) { temp = long_neg((PyLongObject*)twice_rem); - Py_DECREF(twice_rem); - twice_rem = temp; + Py_SETREF(twice_rem, temp); if (twice_rem == NULL) goto error; } @@ -5784,8 +5765,7 @@ _PyLong_DivmodNear(PyObject *a, PyObject *b) temp = 
long_sub(quo, (PyLongObject *)one); else temp = long_add(quo, (PyLongObject *)one); - Py_DECREF(quo); - quo = (PyLongObject *)temp; + Py_SETREF(quo, (PyLongObject *)temp); if (quo == NULL) goto error; /* and remainder */ @@ -5793,8 +5773,7 @@ _PyLong_DivmodNear(PyObject *a, PyObject *b) temp = long_add(rem, (PyLongObject *)b); else temp = long_sub(rem, (PyLongObject *)b); - Py_DECREF(rem); - rem = (PyLongObject *)temp; + Py_SETREF(rem, (PyLongObject *)temp); if (rem == NULL) goto error; } @@ -5860,8 +5839,7 @@ int___round___impl(PyObject *self, PyObject *o_ndigits) /* result = self - divmod_near(self, 10 ** -ndigits)[1] */ temp = long_neg((PyLongObject*)ndigits); - Py_DECREF(ndigits); - ndigits = temp; + Py_SETREF(ndigits, temp); if (ndigits == NULL) return NULL; @@ -5873,21 +5851,18 @@ int___round___impl(PyObject *self, PyObject *o_ndigits) temp = long_pow(result, ndigits, Py_None); Py_DECREF(ndigits); - Py_DECREF(result); - result = temp; + Py_SETREF(result, temp); if (result == NULL) return NULL; temp = _PyLong_DivmodNear(self, result); - Py_DECREF(result); - result = temp; + Py_SETREF(result, temp); if (result == NULL) return NULL; temp = long_sub((PyLongObject *)self, (PyLongObject *)PyTuple_GET_ITEM(result, 1)); - Py_DECREF(result); - result = temp; + Py_SETREF(result, temp); return result; } @@ -5952,8 +5927,7 @@ int_bit_length_impl(PyObject *self) Py_DECREF(x); if (y == NULL) goto error; - Py_DECREF(result); - result = y; + Py_SETREF(result, y); x = (PyLongObject *)PyLong_FromLong((long)msd_bits); if (x == NULL) @@ -5962,8 +5936,7 @@ int_bit_length_impl(PyObject *self) Py_DECREF(x); if (y == NULL) goto error; - Py_DECREF(result); - result = y; + Py_SETREF(result, y); return (PyObject *)result; @@ -6029,8 +6002,7 @@ int_bit_count_impl(PyObject *self) if (y == NULL) { goto error; } - Py_DECREF(result); - result = y; + Py_SETREF(result, y); } return result; diff --git a/Objects/memoryobject.c b/Objects/memoryobject.c index c5ab0bf2dedbe4..1d6cc3b508448d 100644 --- a/Objects/memoryobject.c +++ b/Objects/memoryobject.c @@ -682,8 +682,7 @@ mbuf_add_view(_PyManagedBufferObject *mbuf, const Py_buffer *src) init_suboffsets(dest, src); init_flags(mv); - mv->mbuf = mbuf; - Py_INCREF(mbuf); + mv->mbuf = (_PyManagedBufferObject*)Py_NewRef(mbuf); mbuf->exports++; return (PyObject *)mv; @@ -713,8 +712,7 @@ mbuf_add_incomplete_view(_PyManagedBufferObject *mbuf, const Py_buffer *src, dest = &mv->view; init_shared_values(dest, src); - mv->mbuf = mbuf; - Py_INCREF(mbuf); + mv->mbuf = (_PyManagedBufferObject*)Py_NewRef(mbuf); mbuf->exports++; return (PyObject *)mv; @@ -1102,8 +1100,7 @@ static PyObject * memory_enter(PyObject *self, PyObject *args) { CHECK_RELEASED(self); - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * @@ -1515,8 +1512,7 @@ memory_getbuf(PyMemoryViewObject *self, Py_buffer *view, int flags) } - view->obj = (PyObject *)self; - Py_INCREF(view->obj); + view->obj = Py_NewRef(self); self->exports++; return 0; @@ -2047,10 +2043,9 @@ struct_unpack_single(const char *ptr, struct unpacker *x) return NULL; if (PyTuple_GET_SIZE(v) == 1) { - PyObject *tmp = PyTuple_GET_ITEM(v, 0); - Py_INCREF(tmp); + PyObject *res = Py_NewRef(PyTuple_GET_ITEM(v, 0)); Py_DECREF(v); - return tmp; + return res; } return v; @@ -2496,8 +2491,7 @@ memory_subscript(PyMemoryViewObject *self, PyObject *key) return unpack_single(self, view->buf, fmt); } else if (key == Py_Ellipsis) { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } else { 
PyErr_SetString(PyExc_TypeError, @@ -2957,8 +2951,7 @@ memory_richcompare(PyObject *v, PyObject *w, int op) unpacker_free(unpack_v); unpacker_free(unpack_w); - Py_XINCREF(res); - return res; + return Py_XNewRef(res); } /**************************************************************************/ @@ -3052,8 +3045,7 @@ memory_obj_get(PyMemoryViewObject *self, void *Py_UNUSED(ignored)) if (view->obj == NULL) { Py_RETURN_NONE; } - Py_INCREF(view->obj); - return view->obj; + return Py_NewRef(view->obj); } static PyObject * @@ -3281,8 +3273,7 @@ memory_iter(PyObject *seq) it->it_fmt = fmt; it->it_length = memory_length(obj); it->it_index = 0; - Py_INCREF(seq); - it->it_seq = obj; + it->it_seq = (PyMemoryViewObject*)Py_NewRef(obj); _PyObject_GC_TRACK(it); return (PyObject *)it; } diff --git a/Objects/methodobject.c b/Objects/methodobject.c index 953cf4666d33b7..51752dec3dd08c 100644 --- a/Objects/methodobject.c +++ b/Objects/methodobject.c @@ -88,8 +88,7 @@ PyCMethod_New(PyMethodDef *ml, PyObject *self, PyObject *module, PyTypeObject *c if (om == NULL) { return NULL; } - Py_INCREF(cls); - om->mm_class = cls; + om->mm_class = (PyTypeObject*)Py_NewRef(cls); op = (PyCFunctionObject *)om; } else { if (cls) { @@ -106,10 +105,8 @@ PyCMethod_New(PyMethodDef *ml, PyObject *self, PyObject *module, PyTypeObject *c op->m_weakreflist = NULL; op->m_ml = ml; - Py_XINCREF(self); - op->m_self = self; - Py_XINCREF(module); - op->m_module = module; + op->m_self = Py_XNewRef(self); + op->m_module = Py_XNewRef(module); op->vectorcall = vectorcall; _PyObject_GC_TRACK(op); return (PyObject *)op; @@ -260,8 +257,7 @@ meth_get__self__(PyCFunctionObject *m, void *closure) self = PyCFunction_GET_SELF(m); if (self == NULL) self = Py_None; - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyGetSetDef meth_getsets [] = { @@ -314,8 +310,7 @@ meth_richcompare(PyObject *self, PyObject *other, int op) res = eq ? Py_True : Py_False; else res = eq ? Py_False : Py_True; - Py_INCREF(res); - return res; + return Py_NewRef(res); } static Py_hash_t diff --git a/Objects/moduleobject.c b/Objects/moduleobject.c index ee8ef7f5b4a5dc..8e03f2446f6fcd 100644 --- a/Objects/moduleobject.c +++ b/Objects/moduleobject.c @@ -9,7 +9,6 @@ #include "pycore_moduleobject.h" // _PyModule_GetDef() #include "structmember.h" // PyMemberDef -static Py_ssize_t max_module_number; static PyMemberDef module_members[] = { {"__dict__", T_OBJECT, offsetof(PyModuleObject, md_dict), READONLY}, @@ -43,10 +42,10 @@ PyModuleDef_Init(PyModuleDef* def) { assert(PyModuleDef_Type.tp_flags & Py_TPFLAGS_READY); if (def->m_base.m_index == 0) { - max_module_number++; + _PyRuntime.imports.last_module_index++; Py_SET_REFCNT(def, 1); Py_SET_TYPE(def, &PyModuleDef_Type); - def->m_base.m_index = max_module_number; + def->m_base.m_index = _PyRuntime.imports.last_module_index; } return (PyObject*)def; } @@ -70,8 +69,7 @@ module_init_dict(PyModuleObject *mod, PyObject *md_dict, if (PyDict_SetItem(md_dict, &_Py_ID(__spec__), Py_None) != 0) return -1; if (PyUnicode_CheckExact(name)) { - Py_INCREF(name); - Py_XSETREF(mod->md_name, name); + Py_XSETREF(mod->md_name, Py_NewRef(name)); } return 0; @@ -220,6 +218,7 @@ _PyModule_CreateInitialized(PyModuleDef* module, int module_api_version) _Py_PackageContext, and PyModule_Create*() will substitute this (if the name actually matches). 
*/ +#define _Py_PackageContext (_PyRuntime.imports.pkgcontext) if (_Py_PackageContext != NULL) { const char *p = strrchr(_Py_PackageContext, '.'); if (p != NULL && strcmp(module->m_name, p+1) == 0) { @@ -227,6 +226,7 @@ _PyModule_CreateInitialized(PyModuleDef* module, int module_api_version) _Py_PackageContext = NULL; } } +#undef _Py_PackageContext if ((m = (PyModuleObject*)PyModule_New(name)) == NULL) return NULL; @@ -506,8 +506,7 @@ PyModule_GetNameObject(PyObject *m) } return NULL; } - Py_INCREF(name); - return name; + return Py_NewRef(name); } const char * @@ -541,8 +540,7 @@ PyModule_GetFilenameObject(PyObject *m) } return NULL; } - Py_INCREF(fileobj); - return fileobj; + return Py_NewRef(fileobj); } const char * diff --git a/Objects/namespaceobject.c b/Objects/namespaceobject.c index 7875e7cafecb65..2cc4ddd3c91daa 100644 --- a/Objects/namespaceobject.c +++ b/Objects/namespaceobject.c @@ -85,9 +85,8 @@ namespace_repr(PyObject *ns) if (pairs == NULL) goto error; - d = ((_PyNamespaceObject *)ns)->ns_dict; - assert(d != NULL); - Py_INCREF(d); + assert(((_PyNamespaceObject *)ns)->ns_dict != NULL); + d = Py_NewRef(((_PyNamespaceObject *)ns)->ns_dict); keys = PyDict_Keys(d); if (keys == NULL) diff --git a/Objects/object.c b/Objects/object.c index 837f0a1a578ce6..687bd36d2b4af1 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -455,8 +455,7 @@ PyObject_Str(PyObject *v) if (PyUnicode_READY(v) < 0) return NULL; #endif - Py_INCREF(v); - return v; + return Py_NewRef(v); } if (Py_TYPE(v)->tp_str == NULL) return PyObject_Repr(v); @@ -532,8 +531,7 @@ PyObject_Bytes(PyObject *v) return PyBytes_FromString("<NULL>"); if (PyBytes_CheckExact(v)) { - Py_INCREF(v); - return v; + return Py_NewRef(v); } func = _PyObject_LookupSpecial(v, &_Py_ID(__bytes__)); @@ -689,8 +687,7 @@ do_richcompare(PyThreadState *tstate, PyObject *v, PyObject *w, int op) Py_TYPE(w)->tp_name); return NULL; } - Py_INCREF(res); - return res; + return Py_NewRef(res); } /* Perform a rich comparison with object result. 
This wraps do_richcompare() @@ -1046,22 +1043,25 @@ PyObject_SetAttr(PyObject *v, PyObject *name, PyObject *value) PyObject ** _PyObject_ComputedDictPointer(PyObject *obj) { - Py_ssize_t dictoffset; PyTypeObject *tp = Py_TYPE(obj); - assert((tp->tp_flags & Py_TPFLAGS_MANAGED_DICT) == 0); - dictoffset = tp->tp_dictoffset; - if (dictoffset == 0) + + Py_ssize_t dictoffset = tp->tp_dictoffset; + if (dictoffset == 0) { return NULL; + } + if (dictoffset < 0) { assert(dictoffset != -1); + Py_ssize_t tsize = Py_SIZE(obj); if (tsize < 0) { tsize = -tsize; } size_t size = _PyObject_VAR_SIZE(tp, tsize); + assert(size <= (size_t)PY_SSIZE_T_MAX); + dictoffset += (Py_ssize_t)size; - dictoffset += (long)size; _PyObject_ASSERT(obj, dictoffset > 0); _PyObject_ASSERT(obj, dictoffset % SIZEOF_VOID_P == 0); } @@ -1096,8 +1096,7 @@ _PyObject_GetDictPtr(PyObject *obj) PyObject * PyObject_SelfIter(PyObject *obj) { - Py_INCREF(obj); - return obj; + return Py_NewRef(obj); } /* Helper used when the __next__ method is removed from a type: @@ -1481,8 +1480,7 @@ PyObject_GenericSetDict(PyObject *obj, PyObject *value, void *context) "not a '%.200s'", Py_TYPE(value)->tp_name); return -1; } - Py_INCREF(value); - Py_XSETREF(*dictptr, value); + Py_XSETREF(*dictptr, Py_NewRef(value)); return 0; } @@ -2006,7 +2004,7 @@ _PyTypes_FiniTypes(PyInterpreterState *interp) void _Py_NewReference(PyObject *op) { - if (_Py_tracemalloc_config.tracing) { + if (_PyRuntime.tracemalloc.config.tracing) { _PyTraceMalloc_NewReference(op); } #ifdef Py_REF_DEBUG diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c index 449b618a0e76a0..276c5a276c06e6 100644 --- a/Objects/obmalloc.c +++ b/Objects/obmalloc.c @@ -1,9 +1,16 @@ #include "Python.h" -#include "pycore_pymem.h" // _PyTraceMalloc_Config -#include "pycore_code.h" // stats +#include "pycore_code.h" // stats +#include "pycore_pystate.h" // _PyInterpreterState_GET + +#include "pycore_obmalloc.h" +#include "pycore_pymem.h" -#include <stdbool.h> #include <stdlib.h> // malloc() +#include <stdbool.h> + + +#undef uint +#define uint pymem_uint /* Defined in tracemalloc.c */ @@ -12,84 +19,19 @@ extern void _PyMem_DumpTraceback(int fd, const void *ptr); /* Python's malloc wrappers (see pymem.h) */ -#undef uint -#define uint unsigned int /* assuming >= 16 bits */ - -/* Forward declaration */ -static void* _PyMem_DebugRawMalloc(void *ctx, size_t size); -static void* _PyMem_DebugRawCalloc(void *ctx, size_t nelem, size_t elsize); -static void* _PyMem_DebugRawRealloc(void *ctx, void *ptr, size_t size); -static void _PyMem_DebugRawFree(void *ctx, void *ptr); - -static void* _PyMem_DebugMalloc(void *ctx, size_t size); -static void* _PyMem_DebugCalloc(void *ctx, size_t nelem, size_t elsize); -static void* _PyMem_DebugRealloc(void *ctx, void *ptr, size_t size); -static void _PyMem_DebugFree(void *ctx, void *p); - static void _PyObject_DebugDumpAddress(const void *p); static void _PyMem_DebugCheckAddress(const char *func, char api_id, const void *p); static void _PyMem_SetupDebugHooksDomain(PyMemAllocatorDomain domain); -#if defined(__has_feature) /* Clang */ -# if __has_feature(address_sanitizer) /* is ASAN enabled? */ -# define _Py_NO_SANITIZE_ADDRESS \ - __attribute__((no_sanitize("address"))) -# endif -# if __has_feature(thread_sanitizer) /* is TSAN enabled? */ -# define _Py_NO_SANITIZE_THREAD __attribute__((no_sanitize_thread)) -# endif -# if __has_feature(memory_sanitizer) /* is MSAN enabled? 
*/ -# define _Py_NO_SANITIZE_MEMORY __attribute__((no_sanitize_memory)) -# endif -#elif defined(__GNUC__) -# if defined(__SANITIZE_ADDRESS__) /* GCC 4.8+, is ASAN enabled? */ -# define _Py_NO_SANITIZE_ADDRESS \ - __attribute__((no_sanitize_address)) -# endif - // TSAN is supported since GCC 5.1, but __SANITIZE_THREAD__ macro - // is provided only since GCC 7. -# if __GNUC__ > 5 || (__GNUC__ == 5 && __GNUC_MINOR__ >= 1) -# define _Py_NO_SANITIZE_THREAD __attribute__((no_sanitize_thread)) -# endif -#endif - -#ifndef _Py_NO_SANITIZE_ADDRESS -# define _Py_NO_SANITIZE_ADDRESS -#endif -#ifndef _Py_NO_SANITIZE_THREAD -# define _Py_NO_SANITIZE_THREAD -#endif -#ifndef _Py_NO_SANITIZE_MEMORY -# define _Py_NO_SANITIZE_MEMORY -#endif - -#ifdef WITH_PYMALLOC - -#ifdef MS_WINDOWS -# include <windows.h> -#elif defined(HAVE_MMAP) -# include <sys/mman.h> -# ifdef MAP_ANONYMOUS -# define ARENAS_USE_MMAP -# endif -#endif -/* Forward declaration */ -static void* _PyObject_Malloc(void *ctx, size_t size); -static void* _PyObject_Calloc(void *ctx, size_t nelem, size_t elsize); -static void _PyObject_Free(void *ctx, void *p); -static void* _PyObject_Realloc(void *ctx, void *ptr, size_t size); -#endif +/***************************************/ +/* low-level allocator implementations */ +/***************************************/ +/* the default raw allocator (wraps malloc) */ -/* bpo-35053: Declare tracemalloc configuration here rather than - Modules/_tracemalloc.c because _tracemalloc can be compiled as dynamic - library, whereas _Py_NewReference() requires it. */ -struct _PyTraceMalloc_Config _Py_tracemalloc_config = _PyTraceMalloc_Config_INIT; - - -static void * +void * _PyMem_RawMalloc(void *Py_UNUSED(ctx), size_t size) { /* PyMem_RawMalloc(0) means malloc(1). Some systems would return NULL @@ -101,7 +43,7 @@ _PyMem_RawMalloc(void *Py_UNUSED(ctx), size_t size) return malloc(size); } -static void * +void * _PyMem_RawCalloc(void *Py_UNUSED(ctx), size_t nelem, size_t elsize) { /* PyMem_RawCalloc(0, 0) means calloc(1, 1). Some systems would return NULL @@ -115,7 +57,7 @@ _PyMem_RawCalloc(void *Py_UNUSED(ctx), size_t nelem, size_t elsize) return calloc(nelem, elsize); } -static void * +void * _PyMem_RawRealloc(void *Py_UNUSED(ctx), void *ptr, size_t size) { if (size == 0) @@ -123,32 +65,73 @@ _PyMem_RawRealloc(void *Py_UNUSED(ctx), void *ptr, size_t size) return realloc(ptr, size); } -static void +void _PyMem_RawFree(void *Py_UNUSED(ctx), void *ptr) { free(ptr); } +#define MALLOC_ALLOC {NULL, _PyMem_RawMalloc, _PyMem_RawCalloc, _PyMem_RawRealloc, _PyMem_RawFree} +#define PYRAW_ALLOC MALLOC_ALLOC -#ifdef MS_WINDOWS -static void * -_PyObject_ArenaVirtualAlloc(void *Py_UNUSED(ctx), size_t size) +/* the default object allocator */ + +// The actual implementation is further down. + +#ifdef WITH_PYMALLOC +void* _PyObject_Malloc(void *ctx, size_t size); +void* _PyObject_Calloc(void *ctx, size_t nelem, size_t elsize); +void _PyObject_Free(void *ctx, void *p); +void* _PyObject_Realloc(void *ctx, void *ptr, size_t size); +# define PYMALLOC_ALLOC {NULL, _PyObject_Malloc, _PyObject_Calloc, _PyObject_Realloc, _PyObject_Free} +# define PYOBJ_ALLOC PYMALLOC_ALLOC +#else +# define PYOBJ_ALLOC MALLOC_ALLOC +#endif // WITH_PYMALLOC + +#define PYMEM_ALLOC PYOBJ_ALLOC + +/* the default debug allocators */ + +// The actual implementation is further down. 
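The obmalloc.c restructuring above (and continued below) moves the raw/mem/object allocator tables and their debug wrappers into _PyRuntime.allocators; the public hook API is unchanged. As a reminder of how that API is consumed, here is a hedged sketch of an embedder wrapping the raw domain with a counting allocator. The counting logic and names are assumptions for illustration; only PyMem_GetAllocator(), PyMem_SetAllocator(), and the PyMemAllocatorEx table layout ({ctx, malloc, calloc, realloc, free}, as in MALLOC_ALLOC above) are existing API.

#include "Python.h"

static PyMemAllocatorEx prev_raw;     /* previously installed raw allocator */
static size_t n_raw_allocs = 0;

static void *
counting_malloc(void *ctx, size_t size)
{
    n_raw_allocs++;
    return prev_raw.malloc(prev_raw.ctx, size);
}

static void *
counting_calloc(void *ctx, size_t nelem, size_t elsize)
{
    n_raw_allocs++;
    return prev_raw.calloc(prev_raw.ctx, nelem, elsize);
}

static void *
counting_realloc(void *ctx, void *ptr, size_t size)
{
    return prev_raw.realloc(prev_raw.ctx, ptr, size);
}

static void
counting_free(void *ctx, void *ptr)
{
    prev_raw.free(prev_raw.ctx, ptr);
}

/* Typically called very early (before Py_Initialize()) for the RAW domain. */
static void
install_counting_raw_allocator(void)
{
    PyMem_GetAllocator(PYMEM_DOMAIN_RAW, &prev_raw);
    PyMemAllocatorEx counting = {
        NULL, counting_malloc, counting_calloc, counting_realloc, counting_free
    };
    PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &counting);
}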
+ +void* _PyMem_DebugRawMalloc(void *ctx, size_t size); +void* _PyMem_DebugRawCalloc(void *ctx, size_t nelem, size_t elsize); +void* _PyMem_DebugRawRealloc(void *ctx, void *ptr, size_t size); +void _PyMem_DebugRawFree(void *ctx, void *ptr); + +void* _PyMem_DebugMalloc(void *ctx, size_t size); +void* _PyMem_DebugCalloc(void *ctx, size_t nelem, size_t elsize); +void* _PyMem_DebugRealloc(void *ctx, void *ptr, size_t size); +void _PyMem_DebugFree(void *ctx, void *p); + +#define PYDBGRAW_ALLOC \ + {&_PyRuntime.allocators.debug.raw, _PyMem_DebugRawMalloc, _PyMem_DebugRawCalloc, _PyMem_DebugRawRealloc, _PyMem_DebugRawFree} +#define PYDBGMEM_ALLOC \ + {&_PyRuntime.allocators.debug.mem, _PyMem_DebugMalloc, _PyMem_DebugCalloc, _PyMem_DebugRealloc, _PyMem_DebugFree} +#define PYDBGOBJ_ALLOC \ + {&_PyRuntime.allocators.debug.obj, _PyMem_DebugMalloc, _PyMem_DebugCalloc, _PyMem_DebugRealloc, _PyMem_DebugFree} + +/* the low-level virtual memory allocator */ + +#ifdef WITH_PYMALLOC +# ifdef MS_WINDOWS +# include <windows.h> +# elif defined(HAVE_MMAP) +# include <sys/mman.h> +# ifdef MAP_ANONYMOUS +# define ARENAS_USE_MMAP +# endif +# endif +#endif + +void * +_PyMem_ArenaAlloc(void *Py_UNUSED(ctx), size_t size) { +#ifdef MS_WINDOWS return VirtualAlloc(NULL, size, MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE); -} - -static void -_PyObject_ArenaVirtualFree(void *Py_UNUSED(ctx), void *ptr, - size_t Py_UNUSED(size)) -{ - VirtualFree(ptr, 0, MEM_RELEASE); -} - #elif defined(ARENAS_USE_MMAP) -static void * -_PyObject_ArenaMmap(void *Py_UNUSED(ctx), size_t size) -{ void *ptr; ptr = mmap(NULL, size, PROT_READ|PROT_WRITE, MAP_PRIVATE|MAP_ANONYMOUS, -1, 0); @@ -156,72 +139,73 @@ _PyObject_ArenaMmap(void *Py_UNUSED(ctx), size_t size) return NULL; assert(ptr != NULL); return ptr; -} - -static void -_PyObject_ArenaMunmap(void *Py_UNUSED(ctx), void *ptr, size_t size) -{ - munmap(ptr, size); -} - #else -static void * -_PyObject_ArenaMalloc(void *Py_UNUSED(ctx), size_t size) -{ return malloc(size); +#endif } -static void -_PyObject_ArenaFree(void *Py_UNUSED(ctx), void *ptr, size_t Py_UNUSED(size)) +void +_PyMem_ArenaFree(void *Py_UNUSED(ctx), void *ptr, +#if defined(ARENAS_USE_MMAP) + size_t size +#else + size_t Py_UNUSED(size) +#endif +) { +#ifdef MS_WINDOWS + VirtualFree(ptr, 0, MEM_RELEASE); +#elif defined(ARENAS_USE_MMAP) + munmap(ptr, size); +#else free(ptr); -} #endif +} -#define MALLOC_ALLOC {NULL, _PyMem_RawMalloc, _PyMem_RawCalloc, _PyMem_RawRealloc, _PyMem_RawFree} -#ifdef WITH_PYMALLOC -# define PYMALLOC_ALLOC {NULL, _PyObject_Malloc, _PyObject_Calloc, _PyObject_Realloc, _PyObject_Free} -#endif +/*******************************************/ +/* end low-level allocator implementations */ +/*******************************************/ -#define PYRAW_ALLOC MALLOC_ALLOC -#ifdef WITH_PYMALLOC -# define PYOBJ_ALLOC PYMALLOC_ALLOC -#else -# define PYOBJ_ALLOC MALLOC_ALLOC + +#if defined(__has_feature) /* Clang */ +# if __has_feature(address_sanitizer) /* is ASAN enabled? */ +# define _Py_NO_SANITIZE_ADDRESS \ + __attribute__((no_sanitize("address"))) +# endif +# if __has_feature(thread_sanitizer) /* is TSAN enabled? */ +# define _Py_NO_SANITIZE_THREAD __attribute__((no_sanitize_thread)) +# endif +# if __has_feature(memory_sanitizer) /* is MSAN enabled? */ +# define _Py_NO_SANITIZE_MEMORY __attribute__((no_sanitize_memory)) +# endif +#elif defined(__GNUC__) +# if defined(__SANITIZE_ADDRESS__) /* GCC 4.8+, is ASAN enabled? 
*/ +# define _Py_NO_SANITIZE_ADDRESS \ + __attribute__((no_sanitize_address)) +# endif + // TSAN is supported since GCC 5.1, but __SANITIZE_THREAD__ macro + // is provided only since GCC 7. +# if __GNUC__ > 5 || (__GNUC__ == 5 && __GNUC_MINOR__ >= 1) +# define _Py_NO_SANITIZE_THREAD __attribute__((no_sanitize_thread)) +# endif #endif -#define PYMEM_ALLOC PYOBJ_ALLOC -typedef struct { - /* We tag each block with an API ID in order to tag API violations */ - char api_id; - PyMemAllocatorEx alloc; -} debug_alloc_api_t; -static struct { - debug_alloc_api_t raw; - debug_alloc_api_t mem; - debug_alloc_api_t obj; -} _PyMem_Debug = { - {'r', PYRAW_ALLOC}, - {'m', PYMEM_ALLOC}, - {'o', PYOBJ_ALLOC} - }; +#ifndef _Py_NO_SANITIZE_ADDRESS +# define _Py_NO_SANITIZE_ADDRESS +#endif +#ifndef _Py_NO_SANITIZE_THREAD +# define _Py_NO_SANITIZE_THREAD +#endif +#ifndef _Py_NO_SANITIZE_MEMORY +# define _Py_NO_SANITIZE_MEMORY +#endif -#define PYDBGRAW_ALLOC \ - {&_PyMem_Debug.raw, _PyMem_DebugRawMalloc, _PyMem_DebugRawCalloc, _PyMem_DebugRawRealloc, _PyMem_DebugRawFree} -#define PYDBGMEM_ALLOC \ - {&_PyMem_Debug.mem, _PyMem_DebugMalloc, _PyMem_DebugCalloc, _PyMem_DebugRealloc, _PyMem_DebugFree} -#define PYDBGOBJ_ALLOC \ - {&_PyMem_Debug.obj, _PyMem_DebugMalloc, _PyMem_DebugCalloc, _PyMem_DebugRealloc, _PyMem_DebugFree} -#ifdef Py_DEBUG -static PyMemAllocatorEx _PyMem_Raw = PYDBGRAW_ALLOC; -static PyMemAllocatorEx _PyMem = PYDBGMEM_ALLOC; -static PyMemAllocatorEx _PyObject = PYDBGOBJ_ALLOC; -#else -static PyMemAllocatorEx _PyMem_Raw = PYRAW_ALLOC; -static PyMemAllocatorEx _PyMem = PYMEM_ALLOC; -static PyMemAllocatorEx _PyObject = PYOBJ_ALLOC; -#endif +#define _PyMem_Raw (_PyRuntime.allocators.standard.raw) +#define _PyMem (_PyRuntime.allocators.standard.mem) +#define _PyObject (_PyRuntime.allocators.standard.obj) +#define _PyMem_Debug (_PyRuntime.allocators.debug) +#define _PyObject_Arena (_PyRuntime.allocators.obj_arena) static int @@ -424,26 +408,6 @@ _PyMem_GetCurrentAllocatorName(void) } -#undef MALLOC_ALLOC -#undef PYMALLOC_ALLOC -#undef PYRAW_ALLOC -#undef PYMEM_ALLOC -#undef PYOBJ_ALLOC -#undef PYDBGRAW_ALLOC -#undef PYDBGMEM_ALLOC -#undef PYDBGOBJ_ALLOC - - -static PyObjectArenaAllocator _PyObject_Arena = {NULL, -#ifdef MS_WINDOWS - _PyObject_ArenaVirtualAlloc, _PyObject_ArenaVirtualFree -#elif defined(ARENAS_USE_MMAP) - _PyObject_ArenaMmap, _PyObject_ArenaMunmap -#else - _PyObject_ArenaMalloc, _PyObject_ArenaFree -#endif - }; - #ifdef WITH_PYMALLOC static int _PyMem_DebugEnabled(void) @@ -763,516 +727,15 @@ static int running_on_valgrind = -1; #endif -/* An object allocator for Python. - - Here is an introduction to the layers of the Python memory architecture, - showing where the object allocator is actually used (layer +2), It is - called for every object allocation and deallocation (PyObject_New/Del), - unless the object-specific allocators implement a proprietary allocation - scheme (ex.: ints use a simple free list). This is also the place where - the cyclic garbage collector operates selectively on container objects. - - - Object-specific allocators - _____ ______ ______ ________ - [ int ] [ dict ] [ list ] ... 
[ string ] Python core | -+3 | <----- Object-specific memory -----> | <-- Non-object memory --> | - _______________________________ | | - [ Python's object allocator ] | | -+2 | ####### Object memory ####### | <------ Internal buffers ------> | - ______________________________________________________________ | - [ Python's raw memory allocator (PyMem_ API) ] | -+1 | <----- Python memory (under PyMem manager's control) ------> | | - __________________________________________________________________ - [ Underlying general-purpose allocator (ex: C library malloc) ] - 0 | <------ Virtual memory allocated for the python process -------> | - - ========================================================================= - _______________________________________________________________________ - [ OS-specific Virtual Memory Manager (VMM) ] --1 | <--- Kernel dynamic storage allocation & management (page-based) ---> | - __________________________________ __________________________________ - [ ] [ ] --2 | <-- Physical memory: ROM/RAM --> | | <-- Secondary storage (swap) --> | - -*/ -/*==========================================================================*/ - -/* A fast, special-purpose memory allocator for small blocks, to be used - on top of a general-purpose malloc -- heavily based on previous art. */ - -/* Vladimir Marangozov -- August 2000 */ - -/* - * "Memory management is where the rubber meets the road -- if we do the wrong - * thing at any level, the results will not be good. And if we don't make the - * levels work well together, we are in serious trouble." (1) - * - * (1) Paul R. Wilson, Mark S. Johnstone, Michael Neely, and David Boles, - * "Dynamic Storage Allocation: A Survey and Critical Review", - * in Proc. 1995 Int'l. Workshop on Memory Management, September 1995. - */ - -/* #undef WITH_MEMORY_LIMITS */ /* disable mem limit checks */ - -/*==========================================================================*/ - -/* - * Allocation strategy abstract: - * - * For small requests, the allocator sub-allocates <Big> blocks of memory. - * Requests greater than SMALL_REQUEST_THRESHOLD bytes are routed to the - * system's allocator. - * - * Small requests are grouped in size classes spaced 8 bytes apart, due - * to the required valid alignment of the returned address. Requests of - * a particular size are serviced from memory pools of 4K (one VMM page). - * Pools are fragmented on demand and contain free lists of blocks of one - * particular size class. In other words, there is a fixed-size allocator - * for each size class. Free pools are shared by the different allocators - * thus minimizing the space reserved for a particular size class. - * - * This allocation strategy is a variant of what is known as "simple - * segregated storage based on array of free lists". The main drawback of - * simple segregated storage is that we might end up with lot of reserved - * memory for the different free lists, which degenerate in time. To avoid - * this, we partition each free list in pools and we share dynamically the - * reserved space between all free lists. This technique is quite efficient - * for memory intensive programs which allocate mainly small-sized blocks. - * - * For small requests we have the following table: - * - * Request in bytes Size of allocated block Size class idx - * ---------------------------------------------------------------- - * 1-8 8 0 - * 9-16 16 1 - * 17-24 24 2 - * 25-32 32 3 - * 33-40 40 4 - * 41-48 48 5 - * 49-56 56 6 - * 57-64 64 7 - * 65-72 72 8 - * ... ... 
... - * 497-504 504 62 - * 505-512 512 63 - * - * 0, SMALL_REQUEST_THRESHOLD + 1 and up: routed to the underlying - * allocator. - */ - -/*==========================================================================*/ - -/* - * -- Main tunable settings section -- - */ - -/* - * Alignment of addresses returned to the user. 8-bytes alignment works - * on most current architectures (with 32-bit or 64-bit address buses). - * The alignment value is also used for grouping small requests in size - * classes spaced ALIGNMENT bytes apart. - * - * You shouldn't change this unless you know what you are doing. - */ - -#if SIZEOF_VOID_P > 4 -#define ALIGNMENT 16 /* must be 2^N */ -#define ALIGNMENT_SHIFT 4 -#else -#define ALIGNMENT 8 /* must be 2^N */ -#define ALIGNMENT_SHIFT 3 -#endif - -/* Return the number of bytes in size class I, as a uint. */ -#define INDEX2SIZE(I) (((uint)(I) + 1) << ALIGNMENT_SHIFT) - -/* - * Max size threshold below which malloc requests are considered to be - * small enough in order to use preallocated memory pools. You can tune - * this value according to your application behaviour and memory needs. - * - * Note: a size threshold of 512 guarantees that newly created dictionaries - * will be allocated from preallocated memory pools on 64-bit. - * - * The following invariants must hold: - * 1) ALIGNMENT <= SMALL_REQUEST_THRESHOLD <= 512 - * 2) SMALL_REQUEST_THRESHOLD is evenly divisible by ALIGNMENT - * - * Although not required, for better performance and space efficiency, - * it is recommended that SMALL_REQUEST_THRESHOLD is set to a power of 2. - */ -#define SMALL_REQUEST_THRESHOLD 512 -#define NB_SMALL_SIZE_CLASSES (SMALL_REQUEST_THRESHOLD / ALIGNMENT) - -/* - * The system's VMM page size can be obtained on most unices with a - * getpagesize() call or deduced from various header files. To make - * things simpler, we assume that it is 4K, which is OK for most systems. - * It is probably better if this is the native page size, but it doesn't - * have to be. In theory, if SYSTEM_PAGE_SIZE is larger than the native page - * size, then `POOL_ADDR(p)->arenaindex' could rarely cause a segmentation - * violation fault. 4K is apparently OK for all the platforms that python - * currently targets. - */ -#define SYSTEM_PAGE_SIZE (4 * 1024) - -/* - * Maximum amount of memory managed by the allocator for small requests. - */ -#ifdef WITH_MEMORY_LIMITS -#ifndef SMALL_MEMORY_LIMIT -#define SMALL_MEMORY_LIMIT (64 * 1024 * 1024) /* 64 MB -- more? */ -#endif -#endif - -#if !defined(WITH_PYMALLOC_RADIX_TREE) -/* Use radix-tree to track arena memory regions, for address_in_range(). - * Enable by default since it allows larger pool sizes. Can be disabled - * using -DWITH_PYMALLOC_RADIX_TREE=0 */ -#define WITH_PYMALLOC_RADIX_TREE 1 -#endif - -#if SIZEOF_VOID_P > 4 -/* on 64-bit platforms use larger pools and arenas if we can */ -#define USE_LARGE_ARENAS -#if WITH_PYMALLOC_RADIX_TREE -/* large pools only supported if radix-tree is enabled */ -#define USE_LARGE_POOLS -#endif -#endif - -/* - * The allocator sub-allocates <Big> blocks of memory (called arenas) aligned - * on a page boundary. This is a reserved virtual address space for the - * current process (obtained through a malloc()/mmap() call). In no way this - * means that the memory arenas will be used entirely. A malloc(<Big>) is - * usually an address range reservation for <Big> bytes, unless all pages within - * this space are referenced subsequently. So malloc'ing big blocks and not - * using them does not mean "wasting memory". 
It's an addressable range - * wastage... - * - * Arenas are allocated with mmap() on systems supporting anonymous memory - * mappings to reduce heap fragmentation. - */ -#ifdef USE_LARGE_ARENAS -#define ARENA_BITS 20 /* 1 MiB */ -#else -#define ARENA_BITS 18 /* 256 KiB */ -#endif -#define ARENA_SIZE (1 << ARENA_BITS) -#define ARENA_SIZE_MASK (ARENA_SIZE - 1) - -#ifdef WITH_MEMORY_LIMITS -#define MAX_ARENAS (SMALL_MEMORY_LIMIT / ARENA_SIZE) -#endif - -/* - * Size of the pools used for small blocks. Must be a power of 2. - */ -#ifdef USE_LARGE_POOLS -#define POOL_BITS 14 /* 16 KiB */ -#else -#define POOL_BITS 12 /* 4 KiB */ -#endif -#define POOL_SIZE (1 << POOL_BITS) -#define POOL_SIZE_MASK (POOL_SIZE - 1) - -#if !WITH_PYMALLOC_RADIX_TREE -#if POOL_SIZE != SYSTEM_PAGE_SIZE -# error "pool size must be equal to system page size" -#endif -#endif - -#define MAX_POOLS_IN_ARENA (ARENA_SIZE / POOL_SIZE) -#if MAX_POOLS_IN_ARENA * POOL_SIZE != ARENA_SIZE -# error "arena size not an exact multiple of pool size" -#endif - -/* - * -- End of tunable settings section -- - */ - -/*==========================================================================*/ - -/* When you say memory, my mind reasons in terms of (pointers to) blocks */ -typedef uint8_t block; - -/* Pool for small blocks. */ -struct pool_header { - union { block *_padding; - uint count; } ref; /* number of allocated blocks */ - block *freeblock; /* pool's free list head */ - struct pool_header *nextpool; /* next pool of this size class */ - struct pool_header *prevpool; /* previous pool "" */ - uint arenaindex; /* index into arenas of base adr */ - uint szidx; /* block size class index */ - uint nextoffset; /* bytes to virgin block */ - uint maxnextoffset; /* largest valid nextoffset */ -}; - -typedef struct pool_header *poolp; - -/* Record keeping for arenas. */ -struct arena_object { - /* The address of the arena, as returned by malloc. Note that 0 - * will never be returned by a successful malloc, and is used - * here to mark an arena_object that doesn't correspond to an - * allocated arena. - */ - uintptr_t address; - - /* Pool-aligned pointer to the next pool to be carved off. */ - block* pool_address; - - /* The number of available pools in the arena: free pools + never- - * allocated pools. - */ - uint nfreepools; - - /* The total number of pools in the arena, whether or not available. */ - uint ntotalpools; - - /* Singly-linked list of available pools. */ - struct pool_header* freepools; - - /* Whenever this arena_object is not associated with an allocated - * arena, the nextarena member is used to link all unassociated - * arena_objects in the singly-linked `unused_arena_objects` list. - * The prevarena member is unused in this case. - * - * When this arena_object is associated with an allocated arena - * with at least one available pool, both members are used in the - * doubly-linked `usable_arenas` list, which is maintained in - * increasing order of `nfreepools` values. - * - * Else this arena_object is associated with an allocated arena - * all of whose pools are in use. `nextarena` and `prevarena` - * are both meaningless in this case. 
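
(Editorial aside, not part of this patch.) A worked example makes the size-class mapping and the pool/arena tunables above concrete. The sketch assumes the 64-bit defaults spelled out in the removed comments: ALIGNMENT 16 and ALIGNMENT_SHIFT 4 (the table above shows the 8-byte spacing used when ALIGNMENT is 8), POOL_BITS 14 with the radix tree enabled, ARENA_BITS 20, and a 48-byte pool_header on a typical 64-bit ABI; the constants are re-declared locally because obmalloc keeps them private:

    #include <stdio.h>

    /* Local mirrors of the tunables above (64-bit, radix tree enabled);
     * illustrative only. */
    #define ALIGNMENT        16
    #define ALIGNMENT_SHIFT  4
    #define POOL_SIZE        (1 << 14)      /* 16 KiB */
    #define ARENA_SIZE       (1 << 20)      /* 1 MiB */
    #define POOL_OVERHEAD    48             /* sizeof(struct pool_header), rounded up */
    #define INDEX2SIZE(I)    (((unsigned)(I) + 1) << ALIGNMENT_SHIFT)

    int main(void)
    {
        unsigned nbytes = 40;                              /* a small request */
        unsigned idx = (nbytes - 1) >> ALIGNMENT_SHIFT;    /* size class index */

        printf("request %u -> class %u -> block size %u\n",
               nbytes, idx, INDEX2SIZE(idx));              /* 40 -> 2 -> 48 */
        printf("blocks per pool : %u\n",
               (POOL_SIZE - POOL_OVERHEAD) / INDEX2SIZE(idx));       /* 340 */
        printf("pools per arena : %u\n",
               (unsigned)(ARENA_SIZE / POOL_SIZE));                  /* 64 */
        return 0;
    }

So under these assumptions a 40-byte request lands in size class 2 and is handed a 48-byte block; each 16 KiB pool of that class holds 340 blocks, and each 1 MiB arena is carved into at most 64 pools.
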
- */ - struct arena_object* nextarena; - struct arena_object* prevarena; -}; - -#define POOL_OVERHEAD _Py_SIZE_ROUND_UP(sizeof(struct pool_header), ALIGNMENT) - -#define DUMMY_SIZE_IDX 0xffff /* size class of newly cached pools */ - -/* Round pointer P down to the closest pool-aligned address <= P, as a poolp */ -#define POOL_ADDR(P) ((poolp)_Py_ALIGN_DOWN((P), POOL_SIZE)) - -/* Return total number of blocks in pool of size index I, as a uint. */ -#define NUMBLOCKS(I) ((uint)(POOL_SIZE - POOL_OVERHEAD) / INDEX2SIZE(I)) - -/*==========================================================================*/ - -/* - * Pool table -- headed, circular, doubly-linked lists of partially used pools. - -This is involved. For an index i, usedpools[i+i] is the header for a list of -all partially used pools holding small blocks with "size class idx" i. So -usedpools[0] corresponds to blocks of size 8, usedpools[2] to blocks of size -16, and so on: index 2*i <-> blocks of size (i+1)<<ALIGNMENT_SHIFT. - -Pools are carved off an arena's highwater mark (an arena_object's pool_address -member) as needed. Once carved off, a pool is in one of three states forever -after: - -used == partially used, neither empty nor full - At least one block in the pool is currently allocated, and at least one - block in the pool is not currently allocated (note this implies a pool - has room for at least two blocks). - This is a pool's initial state, as a pool is created only when malloc - needs space. - The pool holds blocks of a fixed size, and is in the circular list headed - at usedpools[i] (see above). It's linked to the other used pools of the - same size class via the pool_header's nextpool and prevpool members. - If all but one block is currently allocated, a malloc can cause a - transition to the full state. If all but one block is not currently - allocated, a free can cause a transition to the empty state. - -full == all the pool's blocks are currently allocated - On transition to full, a pool is unlinked from its usedpools[] list. - It's not linked to from anything then anymore, and its nextpool and - prevpool members are meaningless until it transitions back to used. - A free of a block in a full pool puts the pool back in the used state. - Then it's linked in at the front of the appropriate usedpools[] list, so - that the next allocation for its size class will reuse the freed block. - -empty == all the pool's blocks are currently available for allocation - On transition to empty, a pool is unlinked from its usedpools[] list, - and linked to the front of its arena_object's singly-linked freepools list, - via its nextpool member. The prevpool member has no meaning in this case. - Empty pools have no inherent size class: the next time a malloc finds - an empty list in usedpools[], it takes the first pool off of freepools. - If the size class needed happens to be the same as the size class the pool - last had, some pool initialization can be skipped. - - -Block Management - -Blocks within pools are again carved out as needed. pool->freeblock points to -the start of a singly-linked list of free blocks within the pool. When a -block is freed, it's inserted at the front of its pool's freeblock list. Note -that the available blocks in a pool are *not* linked all together when a pool -is initialized. Instead only "the first two" (lowest addresses) blocks are -set up, returning the first such block, and setting pool->freeblock to a -one-block list holding the second such block. 
This is consistent with that -pymalloc strives at all levels (arena, pool, and block) never to touch a piece -of memory until it's actually needed. - -So long as a pool is in the used state, we're certain there *is* a block -available for allocating, and pool->freeblock is not NULL. If pool->freeblock -points to the end of the free list before we've carved the entire pool into -blocks, that means we simply haven't yet gotten to one of the higher-address -blocks. The offset from the pool_header to the start of "the next" virgin -block is stored in the pool_header nextoffset member, and the largest value -of nextoffset that makes sense is stored in the maxnextoffset member when a -pool is initialized. All the blocks in a pool have been passed out at least -once when and only when nextoffset > maxnextoffset. - - -Major obscurity: While the usedpools vector is declared to have poolp -entries, it doesn't really. It really contains two pointers per (conceptual) -poolp entry, the nextpool and prevpool members of a pool_header. The -excruciating initialization code below fools C so that - - usedpool[i+i] - -"acts like" a genuine poolp, but only so long as you only reference its -nextpool and prevpool members. The "- 2*sizeof(block *)" gibberish is -compensating for that a pool_header's nextpool and prevpool members -immediately follow a pool_header's first two members: - - union { block *_padding; - uint count; } ref; - block *freeblock; - -each of which consume sizeof(block *) bytes. So what usedpools[i+i] really -contains is a fudged-up pointer p such that *if* C believes it's a poolp -pointer, then p->nextpool and p->prevpool are both p (meaning that the headed -circular list is empty). - -It's unclear why the usedpools setup is so convoluted. It could be to -minimize the amount of cache required to hold this heavily-referenced table -(which only *needs* the two interpool pointer members of a pool_header). OTOH, -referencing code has to remember to "double the index" and doing so isn't -free, usedpools[0] isn't a strictly legal pointer, and we're crucially relying -on that C doesn't insert any padding anywhere in a pool_header at or before -the prevpool member. 
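
(Editorial aside, not part of this patch.) The "fudged pointer" trick just described is easier to see in isolation. The stand-alone sketch below uses a simplified pool_header that keeps only the first four fields, builds a two-class usedpools-style table the same way the PT()/PTA() macros below do, and checks that each fake header's nextpool and prevpool point back at itself, i.e. that every headed circular list starts out empty:

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Simplified stand-ins for the obmalloc types; only the layout of the
     * first four fields matters for the trick. */
    typedef uint8_t block;
    typedef struct pool_header {
        union { block *_padding; unsigned int count; } ref;
        block *freeblock;
        struct pool_header *nextpool;
        struct pool_header *prevpool;
    } pool_header;
    typedef pool_header *poolp;

    /* Same shape as the real PTA()/PT(): back the "pointer" up by two slots
     * so that its nextpool/prevpool members land exactly on the two array
     * entries for that size class. */
    #define PTA(x) ((poolp)((uint8_t *)&pools[2*(x)] - 2*sizeof(block *)))
    #define PT(x)  PTA(x), PTA(x)

    static poolp pools[4] = { PT(0), PT(1) };   /* two size classes */

    int main(void)
    {
        for (int i = 0; i < 2; i++) {
            poolp head = PTA(i);
            /* head->nextpool aliases pools[2*i] and head->prevpool aliases
             * pools[2*i + 1]; both were initialized to PTA(i) == head, so
             * the circular list headed there is empty. */
            assert(head->nextpool == head);
            assert(head->prevpool == head);
            printf("size class %d: empty list head at %p\n", i, (void *)head);
        }
        return 0;
    }

As in the real code, this relies on the compiler inserting no padding before prevpool, which holds on the ABIs CPython targets.
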
-**************************************************************************** */ - -#define PTA(x) ((poolp )((uint8_t *)&(usedpools[2*(x)]) - 2*sizeof(block *))) -#define PT(x) PTA(x), PTA(x) - -static poolp usedpools[2 * ((NB_SMALL_SIZE_CLASSES + 7) / 8) * 8] = { - PT(0), PT(1), PT(2), PT(3), PT(4), PT(5), PT(6), PT(7) -#if NB_SMALL_SIZE_CLASSES > 8 - , PT(8), PT(9), PT(10), PT(11), PT(12), PT(13), PT(14), PT(15) -#if NB_SMALL_SIZE_CLASSES > 16 - , PT(16), PT(17), PT(18), PT(19), PT(20), PT(21), PT(22), PT(23) -#if NB_SMALL_SIZE_CLASSES > 24 - , PT(24), PT(25), PT(26), PT(27), PT(28), PT(29), PT(30), PT(31) -#if NB_SMALL_SIZE_CLASSES > 32 - , PT(32), PT(33), PT(34), PT(35), PT(36), PT(37), PT(38), PT(39) -#if NB_SMALL_SIZE_CLASSES > 40 - , PT(40), PT(41), PT(42), PT(43), PT(44), PT(45), PT(46), PT(47) -#if NB_SMALL_SIZE_CLASSES > 48 - , PT(48), PT(49), PT(50), PT(51), PT(52), PT(53), PT(54), PT(55) -#if NB_SMALL_SIZE_CLASSES > 56 - , PT(56), PT(57), PT(58), PT(59), PT(60), PT(61), PT(62), PT(63) -#if NB_SMALL_SIZE_CLASSES > 64 -#error "NB_SMALL_SIZE_CLASSES should be less than 64" -#endif /* NB_SMALL_SIZE_CLASSES > 64 */ -#endif /* NB_SMALL_SIZE_CLASSES > 56 */ -#endif /* NB_SMALL_SIZE_CLASSES > 48 */ -#endif /* NB_SMALL_SIZE_CLASSES > 40 */ -#endif /* NB_SMALL_SIZE_CLASSES > 32 */ -#endif /* NB_SMALL_SIZE_CLASSES > 24 */ -#endif /* NB_SMALL_SIZE_CLASSES > 16 */ -#endif /* NB_SMALL_SIZE_CLASSES > 8 */ -}; - -/*========================================================================== -Arena management. - -`arenas` is a vector of arena_objects. It contains maxarenas entries, some of -which may not be currently used (== they're arena_objects that aren't -currently associated with an allocated arena). Note that arenas proper are -separately malloc'ed. - -Prior to Python 2.5, arenas were never free()'ed. Starting with Python 2.5, -we do try to free() arenas, and use some mild heuristic strategies to increase -the likelihood that arenas eventually can be freed. - -unused_arena_objects - - This is a singly-linked list of the arena_objects that are currently not - being used (no arena is associated with them). Objects are taken off the - head of the list in new_arena(), and are pushed on the head of the list in - PyObject_Free() when the arena is empty. Key invariant: an arena_object - is on this list if and only if its .address member is 0. - -usable_arenas - - This is a doubly-linked list of the arena_objects associated with arenas - that have pools available. These pools are either waiting to be reused, - or have not been used before. The list is sorted to have the most- - allocated arenas first (ascending order based on the nfreepools member). - This means that the next allocation will come from a heavily used arena, - which gives the nearly empty arenas a chance to be returned to the system. - In my unscientific tests this dramatically improved the number of arenas - that could be freed. - -Note that an arena_object associated with an arena all of whose pools are -currently in use isn't on either list. - -Changed in Python 3.8: keeping usable_arenas sorted by number of free pools -used to be done by one-at-a-time linear search when an arena's number of -free pools changed. That could, overall, consume time quadratic in the -number of arenas. That didn't really matter when there were only a few -hundred arenas (typical!), but could be a timing disaster when there were -hundreds of thousands. See bpo-37029. 
- -Now we have a vector of "search fingers" to eliminate the need to search: -nfp2lasta[nfp] returns the last ("rightmost") arena in usable_arenas -with nfp free pools. This is NULL if and only if there is no arena with -nfp free pools in usable_arenas. -*/ - -/* Array of objects used to track chunks of memory (arenas). */ -static struct arena_object* arenas = NULL; -/* Number of slots currently allocated in the `arenas` vector. */ -static uint maxarenas = 0; - -/* The head of the singly-linked, NULL-terminated list of available - * arena_objects. - */ -static struct arena_object* unused_arena_objects = NULL; - -/* The head of the doubly-linked, NULL-terminated at each end, list of - * arena_objects associated with arenas that have pools available. - */ -static struct arena_object* usable_arenas = NULL; - -/* nfp2lasta[nfp] is the last arena in usable_arenas with nfp free pools */ -static struct arena_object* nfp2lasta[MAX_POOLS_IN_ARENA + 1] = { NULL }; - -/* How many arena_objects do we initially allocate? - * 16 = can allocate 16 arenas = 16 * ARENA_SIZE = 4MB before growing the - * `arenas` vector. - */ -#define INITIAL_ARENA_OBJECTS 16 - -/* Number of arenas allocated that haven't been free()'d. */ -static size_t narenas_currently_allocated = 0; - -/* Total number of times malloc() called to allocate an arena. */ -static size_t ntimes_arena_allocated = 0; -/* High water mark (max value ever seen) for narenas_currently_allocated. */ -static size_t narenas_highwater = 0; - -static Py_ssize_t raw_allocated_blocks; +#define allarenas (_PyRuntime.obmalloc.mgmt.arenas) +#define maxarenas (_PyRuntime.obmalloc.mgmt.maxarenas) +#define unused_arena_objects (_PyRuntime.obmalloc.mgmt.unused_arena_objects) +#define usable_arenas (_PyRuntime.obmalloc.mgmt.usable_arenas) +#define nfp2lasta (_PyRuntime.obmalloc.mgmt.nfp2lasta) +#define narenas_currently_allocated (_PyRuntime.obmalloc.mgmt.narenas_currently_allocated) +#define ntimes_arena_allocated (_PyRuntime.obmalloc.mgmt.ntimes_arena_allocated) +#define narenas_highwater (_PyRuntime.obmalloc.mgmt.narenas_highwater) +#define raw_allocated_blocks (_PyRuntime.obmalloc.mgmt.raw_allocated_blocks) Py_ssize_t _Py_GetAllocatedBlocks(void) @@ -1281,15 +744,15 @@ _Py_GetAllocatedBlocks(void) /* add up allocated blocks for used pools */ for (uint i = 0; i < maxarenas; ++i) { /* Skip arenas which are not allocated. */ - if (arenas[i].address == 0) { + if (allarenas[i].address == 0) { continue; } - uintptr_t base = (uintptr_t)_Py_ALIGN_UP(arenas[i].address, POOL_SIZE); + uintptr_t base = (uintptr_t)_Py_ALIGN_UP(allarenas[i].address, POOL_SIZE); /* visit every pool in the arena */ - assert(base <= (uintptr_t) arenas[i].pool_address); - for (; base < (uintptr_t) arenas[i].pool_address; base += POOL_SIZE) { + assert(base <= (uintptr_t) allarenas[i].pool_address); + for (; base < (uintptr_t) allarenas[i].pool_address; base += POOL_SIZE) { poolp p = (poolp)base; n += p->ref.count; } @@ -1299,155 +762,18 @@ _Py_GetAllocatedBlocks(void) #if WITH_PYMALLOC_RADIX_TREE /*==========================================================================*/ -/* radix tree for tracking arena usage. If enabled, used to implement - address_in_range(). 
- - memory address bit allocation for keys - - 64-bit pointers, IGNORE_BITS=0 and 2^20 arena size: - 15 -> MAP_TOP_BITS - 15 -> MAP_MID_BITS - 14 -> MAP_BOT_BITS - 20 -> ideal aligned arena - ---- - 64 - - 64-bit pointers, IGNORE_BITS=16, and 2^20 arena size: - 16 -> IGNORE_BITS - 10 -> MAP_TOP_BITS - 10 -> MAP_MID_BITS - 8 -> MAP_BOT_BITS - 20 -> ideal aligned arena - ---- - 64 - - 32-bit pointers and 2^18 arena size: - 14 -> MAP_BOT_BITS - 18 -> ideal aligned arena - ---- - 32 - -*/ - -#if SIZEOF_VOID_P == 8 - -/* number of bits in a pointer */ -#define POINTER_BITS 64 - -/* High bits of memory addresses that will be ignored when indexing into the - * radix tree. Setting this to zero is the safe default. For most 64-bit - * machines, setting this to 16 would be safe. The kernel would not give - * user-space virtual memory addresses that have significant information in - * those high bits. The main advantage to setting IGNORE_BITS > 0 is that less - * virtual memory will be used for the top and middle radix tree arrays. Those - * arrays are allocated in the BSS segment and so will typically consume real - * memory only if actually accessed. - */ -#define IGNORE_BITS 0 - -/* use the top and mid layers of the radix tree */ -#define USE_INTERIOR_NODES - -#elif SIZEOF_VOID_P == 4 - -#define POINTER_BITS 32 -#define IGNORE_BITS 0 - -#else - - /* Currently this code works for 64-bit or 32-bit pointers only. */ -#error "obmalloc radix tree requires 64-bit or 32-bit pointers." - -#endif /* SIZEOF_VOID_P */ - -/* arena_coverage_t members require this to be true */ -#if ARENA_BITS >= 32 -# error "arena size must be < 2^32" -#endif - -/* the lower bits of the address that are not ignored */ -#define ADDRESS_BITS (POINTER_BITS - IGNORE_BITS) +/* radix tree for tracking arena usage. */ +#define arena_map_root (_PyRuntime.obmalloc.usage.arena_map_root) #ifdef USE_INTERIOR_NODES -/* number of bits used for MAP_TOP and MAP_MID nodes */ -#define INTERIOR_BITS ((ADDRESS_BITS - ARENA_BITS + 2) / 3) -#else -#define INTERIOR_BITS 0 -#endif - -#define MAP_TOP_BITS INTERIOR_BITS -#define MAP_TOP_LENGTH (1 << MAP_TOP_BITS) -#define MAP_TOP_MASK (MAP_TOP_LENGTH - 1) - -#define MAP_MID_BITS INTERIOR_BITS -#define MAP_MID_LENGTH (1 << MAP_MID_BITS) -#define MAP_MID_MASK (MAP_MID_LENGTH - 1) - -#define MAP_BOT_BITS (ADDRESS_BITS - ARENA_BITS - 2*INTERIOR_BITS) -#define MAP_BOT_LENGTH (1 << MAP_BOT_BITS) -#define MAP_BOT_MASK (MAP_BOT_LENGTH - 1) - -#define MAP_BOT_SHIFT ARENA_BITS -#define MAP_MID_SHIFT (MAP_BOT_BITS + MAP_BOT_SHIFT) -#define MAP_TOP_SHIFT (MAP_MID_BITS + MAP_MID_SHIFT) - -#define AS_UINT(p) ((uintptr_t)(p)) -#define MAP_BOT_INDEX(p) ((AS_UINT(p) >> MAP_BOT_SHIFT) & MAP_BOT_MASK) -#define MAP_MID_INDEX(p) ((AS_UINT(p) >> MAP_MID_SHIFT) & MAP_MID_MASK) -#define MAP_TOP_INDEX(p) ((AS_UINT(p) >> MAP_TOP_SHIFT) & MAP_TOP_MASK) - -#if IGNORE_BITS > 0 -/* Return the ignored part of the pointer address. Those bits should be same - * for all valid pointers if IGNORE_BITS is set correctly. - */ -#define HIGH_BITS(p) (AS_UINT(p) >> ADDRESS_BITS) -#else -#define HIGH_BITS(p) 0 -#endif - - -/* This is the leaf of the radix tree. See arena_map_mark_used() for the - * meaning of these members. */ -typedef struct { - int32_t tail_hi; - int32_t tail_lo; -} arena_coverage_t; - -typedef struct arena_map_bot { - /* The members tail_hi and tail_lo are accessed together. So, it - * better to have them as an array of structs, rather than two - * arrays. 
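
(Editorial aside, not part of this patch.) For readers following the bit split in the removed comment, here is a small sketch of how a 64-bit address decomposes into the three radix-tree indices when IGNORE_BITS is 0 and arenas are 1 MiB (the 15/15/14/20 split above); the constants mirror the removed macros and are re-declared locally, so this is an illustration rather than the obmalloc code itself:

    #include <stdint.h>
    #include <stdio.h>

    /* Local mirrors of the radix-tree constants for the 64-bit layout. */
    #define ARENA_BITS     20
    #define MAP_BOT_BITS   14
    #define MAP_MID_BITS   15
    #define MAP_TOP_BITS   15

    #define MAP_BOT_SHIFT  ARENA_BITS                       /* 20 */
    #define MAP_MID_SHIFT  (MAP_BOT_BITS + MAP_BOT_SHIFT)   /* 34 */
    #define MAP_TOP_SHIFT  (MAP_MID_BITS + MAP_MID_SHIFT)   /* 49 */

    #define MAP_BOT_INDEX(p) (((uintptr_t)(p) >> MAP_BOT_SHIFT) & ((1 << MAP_BOT_BITS) - 1))
    #define MAP_MID_INDEX(p) (((uintptr_t)(p) >> MAP_MID_SHIFT) & ((1 << MAP_MID_BITS) - 1))
    #define MAP_TOP_INDEX(p) (((uintptr_t)(p) >> MAP_TOP_SHIFT) & ((1 << MAP_TOP_BITS) - 1))

    int main(void)
    {
        /* An arbitrary example address (assumes 64-bit pointers). */
        uintptr_t p = (uintptr_t)0x00007f3a12345678ULL;
        printf("top=%lu mid=%lu bot=%lu arena-offset=%lu\n",
               (unsigned long)MAP_TOP_INDEX(p),
               (unsigned long)MAP_MID_INDEX(p),
               (unsigned long)MAP_BOT_INDEX(p),
               (unsigned long)(p & ((1u << ARENA_BITS) - 1)));
        return 0;
    }

Every pointer inside the same ideally aligned 1 MiB arena shares the same (top, mid, bot) triple, which is what lets arena_map_is_used() decide membership with a handful of table lookups.
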
- */ - arena_coverage_t arenas[MAP_BOT_LENGTH]; -} arena_map_bot_t; - -#ifdef USE_INTERIOR_NODES -typedef struct arena_map_mid { - struct arena_map_bot *ptrs[MAP_MID_LENGTH]; -} arena_map_mid_t; - -typedef struct arena_map_top { - struct arena_map_mid *ptrs[MAP_TOP_LENGTH]; -} arena_map_top_t; -#endif - -/* The root of radix tree. Note that by initializing like this, the memory - * should be in the BSS. The OS will only memory map pages as the MAP_MID - * nodes get used (OS pages are demand loaded as needed). - */ -#ifdef USE_INTERIOR_NODES -static arena_map_top_t arena_map_root; -/* accounting for number of used interior nodes */ -static int arena_map_mid_count; -static int arena_map_bot_count; -#else -static arena_map_bot_t arena_map_root; +#define arena_map_mid_count (_PyRuntime.obmalloc.usage.arena_map_mid_count) +#define arena_map_bot_count (_PyRuntime.obmalloc.usage.arena_map_bot_count) #endif /* Return a pointer to a bottom tree node, return NULL if it doesn't exist or * it cannot be created */ static Py_ALWAYS_INLINE arena_map_bot_t * -arena_map_get(block *p, int create) +arena_map_get(pymem_block *p, int create) { #ifdef USE_INTERIOR_NODES /* sanity check that IGNORE_BITS is correct */ @@ -1512,12 +838,12 @@ arena_map_mark_used(uintptr_t arena_base, int is_used) { /* sanity check that IGNORE_BITS is correct */ assert(HIGH_BITS(arena_base) == HIGH_BITS(&arena_map_root)); - arena_map_bot_t *n_hi = arena_map_get((block *)arena_base, is_used); + arena_map_bot_t *n_hi = arena_map_get((pymem_block *)arena_base, is_used); if (n_hi == NULL) { assert(is_used); /* otherwise node should already exist */ return 0; /* failed to allocate space for node */ } - int i3 = MAP_BOT_INDEX((block *)arena_base); + int i3 = MAP_BOT_INDEX((pymem_block *)arena_base); int32_t tail = (int32_t)(arena_base & ARENA_SIZE_MASK); if (tail == 0) { /* is ideal arena address */ @@ -1537,7 +863,7 @@ arena_map_mark_used(uintptr_t arena_base, int is_used) * must overflow to 0. However, that would mean arena_base was * "ideal" and we should not be in this case. */ assert(arena_base < arena_base_next); - arena_map_bot_t *n_lo = arena_map_get((block *)arena_base_next, is_used); + arena_map_bot_t *n_lo = arena_map_get((pymem_block *)arena_base_next, is_used); if (n_lo == NULL) { assert(is_used); /* otherwise should already exist */ n_hi->arenas[i3].tail_hi = 0; @@ -1552,7 +878,7 @@ arena_map_mark_used(uintptr_t arena_base, int is_used) /* Return true if 'p' is a pointer inside an obmalloc arena. * _PyObject_Free() calls this so it needs to be very fast. 
*/ static int -arena_map_is_used(block *p) +arena_map_is_used(pymem_block *p) { arena_map_bot_t *n = arena_map_get(p, 0); if (n == NULL) { @@ -1582,11 +908,12 @@ new_arena(void) struct arena_object* arenaobj; uint excess; /* number of bytes above pool alignment */ void *address; - static int debug_stats = -1; + int debug_stats = _PyRuntime.obmalloc.dump_debug_stats; if (debug_stats == -1) { const char *opt = Py_GETENV("PYTHONMALLOCSTATS"); debug_stats = (opt != NULL && *opt != '\0'); + _PyRuntime.obmalloc.dump_debug_stats = debug_stats; } if (debug_stats) { _PyObject_DebugMallocStats(stderr); @@ -1604,14 +931,14 @@ new_arena(void) if (numarenas <= maxarenas) return NULL; /* overflow */ #if SIZEOF_SIZE_T <= SIZEOF_INT - if (numarenas > SIZE_MAX / sizeof(*arenas)) + if (numarenas > SIZE_MAX / sizeof(*allarenas)) return NULL; /* overflow */ #endif - nbytes = numarenas * sizeof(*arenas); - arenaobj = (struct arena_object *)PyMem_RawRealloc(arenas, nbytes); + nbytes = numarenas * sizeof(*allarenas); + arenaobj = (struct arena_object *)PyMem_RawRealloc(allarenas, nbytes); if (arenaobj == NULL) return NULL; - arenas = arenaobj; + allarenas = arenaobj; /* We might need to fix pointers that were copied. However, * new_arena only gets called when all the pages in the @@ -1624,13 +951,13 @@ new_arena(void) /* Put the new arenas on the unused_arena_objects list. */ for (i = maxarenas; i < numarenas; ++i) { - arenas[i].address = 0; /* mark as unassociated */ - arenas[i].nextarena = i < numarenas - 1 ? - &arenas[i+1] : NULL; + allarenas[i].address = 0; /* mark as unassociated */ + allarenas[i].nextarena = i < numarenas - 1 ? + &allarenas[i+1] : NULL; } /* Update globals. */ - unused_arena_objects = &arenas[maxarenas]; + unused_arena_objects = &allarenas[maxarenas]; maxarenas = numarenas; } @@ -1666,7 +993,7 @@ new_arena(void) arenaobj->freepools = NULL; /* pool_address <- first pool-aligned address in the arena nfreepools <- number of whole pools that fit after alignment */ - arenaobj->pool_address = (block*)arenaobj->address; + arenaobj->pool_address = (pymem_block*)arenaobj->address; arenaobj->nfreepools = MAX_POOLS_IN_ARENA; excess = (uint)(arenaobj->address & POOL_SIZE_MASK); if (excess != 0) { @@ -1777,14 +1104,16 @@ address_in_range(void *p, poolp pool) // only once. uint arenaindex = *((volatile uint *)&pool->arenaindex); return arenaindex < maxarenas && - (uintptr_t)p - arenas[arenaindex].address < ARENA_SIZE && - arenas[arenaindex].address != 0; + (uintptr_t)p - allarenas[arenaindex].address < ARENA_SIZE && + allarenas[arenaindex].address != 0; } #endif /* !WITH_PYMALLOC_RADIX_TREE */ /*==========================================================================*/ +#define usedpools (_PyRuntime.obmalloc.pools.used) + // Called when freelist is exhausted. Extend the freelist if there is // space for a block. Otherwise, remove this pool from usedpools. static void @@ -1792,9 +1121,9 @@ pymalloc_pool_extend(poolp pool, uint size) { if (UNLIKELY(pool->nextoffset <= pool->maxnextoffset)) { /* There is room for another block. 
*/ - pool->freeblock = (block*)pool + pool->nextoffset; + pool->freeblock = (pymem_block*)pool + pool->nextoffset; pool->nextoffset += INDEX2SIZE(size); - *(block **)(pool->freeblock) = NULL; + *(pymem_block **)(pool->freeblock) = NULL; return; } @@ -1874,7 +1203,7 @@ allocate_from_new_pool(uint size) */ assert(usable_arenas->freepools != NULL || usable_arenas->pool_address <= - (block*)usable_arenas->address + + (pymem_block*)usable_arenas->address + ARENA_SIZE - POOL_SIZE); } } @@ -1883,10 +1212,10 @@ allocate_from_new_pool(uint size) assert(usable_arenas->nfreepools > 0); assert(usable_arenas->freepools == NULL); pool = (poolp)usable_arenas->pool_address; - assert((block*)pool <= (block*)usable_arenas->address + + assert((pymem_block*)pool <= (pymem_block*)usable_arenas->address + ARENA_SIZE - POOL_SIZE); - pool->arenaindex = (uint)(usable_arenas - arenas); - assert(&arenas[pool->arenaindex] == usable_arenas); + pool->arenaindex = (uint)(usable_arenas - allarenas); + assert(&allarenas[pool->arenaindex] == usable_arenas); pool->szidx = DUMMY_SIZE_IDX; usable_arenas->pool_address += POOL_SIZE; --usable_arenas->nfreepools; @@ -1905,7 +1234,7 @@ allocate_from_new_pool(uint size) } /* Frontlink to used pools. */ - block *bp; + pymem_block *bp; poolp next = usedpools[size + size]; /* == prev */ pool->nextpool = next; pool->prevpool = next; @@ -1919,7 +1248,7 @@ allocate_from_new_pool(uint size) */ bp = pool->freeblock; assert(bp != NULL); - pool->freeblock = *(block **)bp; + pool->freeblock = *(pymem_block **)bp; return bp; } /* @@ -1929,11 +1258,11 @@ allocate_from_new_pool(uint size) */ pool->szidx = size; size = INDEX2SIZE(size); - bp = (block *)pool + POOL_OVERHEAD; + bp = (pymem_block *)pool + POOL_OVERHEAD; pool->nextoffset = POOL_OVERHEAD + (size << 1); pool->maxnextoffset = POOL_SIZE - size; pool->freeblock = bp + size; - *(block **)(pool->freeblock) = NULL; + *(pymem_block **)(pool->freeblock) = NULL; return bp; } @@ -1966,7 +1295,7 @@ pymalloc_alloc(void *Py_UNUSED(ctx), size_t nbytes) uint size = (uint)(nbytes - 1) >> ALIGNMENT_SHIFT; poolp pool = usedpools[size + size]; - block *bp; + pymem_block *bp; if (LIKELY(pool != pool->nextpool)) { /* @@ -1977,7 +1306,7 @@ pymalloc_alloc(void *Py_UNUSED(ctx), size_t nbytes) bp = pool->freeblock; assert(bp != NULL); - if (UNLIKELY((pool->freeblock = *(block **)bp) == NULL)) { + if (UNLIKELY((pool->freeblock = *(pymem_block **)bp) == NULL)) { // Reached the end of the free list, try to extend it. pymalloc_pool_extend(pool, size); } @@ -1993,7 +1322,7 @@ pymalloc_alloc(void *Py_UNUSED(ctx), size_t nbytes) } -static void * +void * _PyObject_Malloc(void *ctx, size_t nbytes) { void* ptr = pymalloc_alloc(ctx, nbytes); @@ -2009,7 +1338,7 @@ _PyObject_Malloc(void *ctx, size_t nbytes) } -static void * +void * _PyObject_Calloc(void *ctx, size_t nelem, size_t elsize) { assert(elsize == 0 || nelem <= (size_t)PY_SSIZE_T_MAX / elsize); @@ -2056,7 +1385,7 @@ insert_to_freepool(poolp pool) /* Link the pool to freepools. This is a singly-linked * list, and pool->prevpool isn't used there. */ - struct arena_object *ao = &arenas[pool->arenaindex]; + struct arena_object *ao = &allarenas[pool->arenaindex]; pool->nextpool = ao->freepools; ao->freepools = pool; uint nf = ao->nfreepools; @@ -2239,9 +1568,9 @@ pymalloc_free(void *Py_UNUSED(ctx), void *p) * list in any case). 
*/ assert(pool->ref.count > 0); /* else it was empty */ - block *lastfree = pool->freeblock; - *(block **)p = lastfree; - pool->freeblock = (block *)p; + pymem_block *lastfree = pool->freeblock; + *(pymem_block **)p = lastfree; + pool->freeblock = (pymem_block *)p; pool->ref.count--; if (UNLIKELY(lastfree == NULL)) { @@ -2273,7 +1602,7 @@ pymalloc_free(void *Py_UNUSED(ctx), void *p) } -static void +void _PyObject_Free(void *ctx, void *p) { /* PyObject_Free(NULL) has no effect */ @@ -2359,7 +1688,7 @@ pymalloc_realloc(void *ctx, void **newptr_p, void *p, size_t nbytes) } -static void * +void * _PyObject_Realloc(void *ctx, void *ptr, size_t nbytes) { void *ptr2; @@ -2536,13 +1865,13 @@ _PyMem_DebugRawAlloc(int use_calloc, void *ctx, size_t nbytes) return data; } -static void * +void * _PyMem_DebugRawMalloc(void *ctx, size_t nbytes) { return _PyMem_DebugRawAlloc(0, ctx, nbytes); } -static void * +void * _PyMem_DebugRawCalloc(void *ctx, size_t nelem, size_t elsize) { size_t nbytes; @@ -2557,7 +1886,7 @@ _PyMem_DebugRawCalloc(void *ctx, size_t nelem, size_t elsize) Then fills the original bytes with PYMEM_DEADBYTE. Then calls the underlying free. */ -static void +void _PyMem_DebugRawFree(void *ctx, void *p) { /* PyMem_Free(NULL) has no effect */ @@ -2577,7 +1906,7 @@ _PyMem_DebugRawFree(void *ctx, void *p) } -static void * +void * _PyMem_DebugRawRealloc(void *ctx, void *p, size_t nbytes) { if (p == NULL) { @@ -2687,14 +2016,14 @@ _PyMem_DebugCheckGIL(const char *func) } } -static void * +void * _PyMem_DebugMalloc(void *ctx, size_t nbytes) { _PyMem_DebugCheckGIL(__func__); return _PyMem_DebugRawMalloc(ctx, nbytes); } -static void * +void * _PyMem_DebugCalloc(void *ctx, size_t nelem, size_t elsize) { _PyMem_DebugCheckGIL(__func__); @@ -2702,7 +2031,7 @@ _PyMem_DebugCalloc(void *ctx, size_t nelem, size_t elsize) } -static void +void _PyMem_DebugFree(void *ctx, void *ptr) { _PyMem_DebugCheckGIL(__func__); @@ -2710,7 +2039,7 @@ _PyMem_DebugFree(void *ctx, void *ptr) } -static void * +void * _PyMem_DebugRealloc(void *ctx, void *ptr, size_t nbytes) { _PyMem_DebugCheckGIL(__func__); @@ -3000,14 +2329,14 @@ _PyObject_DebugMallocStats(FILE *out) * will be living in full pools -- would be a shame to miss them. */ for (i = 0; i < maxarenas; ++i) { - uintptr_t base = arenas[i].address; + uintptr_t base = allarenas[i].address; /* Skip arenas which are not allocated. 
*/ - if (arenas[i].address == (uintptr_t)NULL) + if (allarenas[i].address == (uintptr_t)NULL) continue; narenas += 1; - numfreepools += arenas[i].nfreepools; + numfreepools += allarenas[i].nfreepools; /* round up to pool alignment */ if (base & (uintptr_t)POOL_SIZE_MASK) { @@ -3017,8 +2346,8 @@ _PyObject_DebugMallocStats(FILE *out) } /* visit every pool in the arena */ - assert(base <= (uintptr_t) arenas[i].pool_address); - for (; base < (uintptr_t) arenas[i].pool_address; base += POOL_SIZE) { + assert(base <= (uintptr_t) allarenas[i].pool_address); + for (; base < (uintptr_t) allarenas[i].pool_address; base += POOL_SIZE) { poolp p = (poolp)base; const uint sz = p->szidx; uint freeblocks; @@ -3026,7 +2355,7 @@ _PyObject_DebugMallocStats(FILE *out) if (p->ref.count == 0) { /* currently unused */ #ifdef Py_DEBUG - assert(pool_is_in_list(p, arenas[i].freepools)); + assert(pool_is_in_list(p, allarenas[i].freepools)); #endif continue; } diff --git a/Objects/odictobject.c b/Objects/odictobject.c index bd2a7677fe1cf4..4976b70b5dff5a 100644 --- a/Objects/odictobject.c +++ b/Objects/odictobject.c @@ -889,8 +889,7 @@ odict_inplace_or(PyObject *self, PyObject *other) if (mutablemapping_update_arg(self, other) < 0) { return NULL; } - Py_INCREF(self); - return self; + return Py_NewRef(self); } /* tp_as_number */ @@ -1007,8 +1006,7 @@ OrderedDict_setdefault_impl(PyODictObject *self, PyObject *key, return NULL; assert(_odict_find_node(self, key) == NULL); if (PyODict_SetItem((PyObject *)self, key, default_value) >= 0) { - result = default_value; - Py_INCREF(result); + result = Py_NewRef(default_value); } } else { @@ -1024,8 +1022,7 @@ OrderedDict_setdefault_impl(PyODictObject *self, PyObject *key, result = PyObject_GetItem((PyObject *)self, key); } else if (PyObject_SetItem((PyObject *)self, key, default_value) >= 0) { - result = default_value; - Py_INCREF(result); + result = Py_NewRef(default_value); } } @@ -1055,8 +1052,7 @@ _odict_popkey_hash(PyObject *od, PyObject *key, PyObject *failobj, else if (value == NULL && !PyErr_Occurred()) { /* Apply the fallback value, if necessary. */ if (failobj) { - value = failobj; - Py_INCREF(failobj); + value = Py_NewRef(failobj); } else { PyErr_SetObject(PyExc_KeyError, key); @@ -1118,8 +1114,7 @@ OrderedDict_popitem_impl(PyODictObject *self, int last) } node = last ? _odict_LAST(self) : _odict_FIRST(self); - key = _odictnode_KEY(node); - Py_INCREF(key); + key = Py_NewRef(_odictnode_KEY(node)); value = _odict_popkey_hash((PyObject *)self, key, NULL, _odictnode_HASH(node)); if (value == NULL) return NULL; @@ -1497,8 +1492,7 @@ odict_richcompare(PyObject *v, PyObject *w, int op) return NULL; res = (eq == (op == Py_EQ)) ? Py_True : Py_False; - Py_INCREF(res); - return res; + return Py_NewRef(res); } else { Py_RETURN_NOTIMPLEMENTED; } @@ -1714,8 +1708,7 @@ odictiter_nextkey(odictiterobject *di) di->di_current = NULL; } else { - di->di_current = _odictnode_KEY(node); - Py_INCREF(di->di_current); + di->di_current = Py_NewRef(_odictnode_KEY(node)); } return key; @@ -1872,12 +1865,10 @@ odictiter_new(PyODictObject *od, int kind) di->kind = kind; node = reversed ? _odict_LAST(od) : _odict_FIRST(od); - di->di_current = node ? _odictnode_KEY(node) : NULL; - Py_XINCREF(di->di_current); + di->di_current = node ? 
Py_NewRef(_odictnode_KEY(node)) : NULL; di->di_size = PyODict_SIZE(od); di->di_state = od->od_state; - di->di_odict = od; - Py_INCREF(od); + di->di_odict = (PyODictObject*)Py_NewRef(od); _PyObject_GC_TRACK(di); return (PyObject *)di; diff --git a/Objects/rangeobject.c b/Objects/rangeobject.c index f0b06de1c7d2a0..992e7c079ded54 100644 --- a/Objects/rangeobject.c +++ b/Objects/rangeobject.c @@ -105,10 +105,8 @@ range_from_array(PyTypeObject *type, PyObject *const *args, Py_ssize_t num_args) if (!stop) { return NULL; } - start = _PyLong_GetZero(); - Py_INCREF(start); - step = _PyLong_GetOne(); - Py_INCREF(step); + start = Py_NewRef(_PyLong_GetZero()); + step = Py_NewRef(_PyLong_GetOne()); break; case 0: PyErr_SetString(PyExc_TypeError, @@ -216,8 +214,7 @@ compute_range_length(PyObject *start, PyObject *stop, PyObject *step) if (cmp_result < 0) return NULL; result = zero; - Py_INCREF(result); - return result; + return Py_NewRef(result); } if ((tmp1 = PyNumber_Subtract(hi, lo)) == NULL) @@ -297,8 +294,7 @@ compute_range_item(rangeobject *r, PyObject *arg) return NULL; } } else { - i = arg; - Py_INCREF(i); + i = Py_NewRef(arg); } /* PyLong equivalent to: @@ -522,30 +518,24 @@ range_hash(rangeobject *r) t = PyTuple_New(3); if (!t) return -1; - Py_INCREF(r->length); - PyTuple_SET_ITEM(t, 0, r->length); + PyTuple_SET_ITEM(t, 0, Py_NewRef(r->length)); cmp_result = PyObject_Not(r->length); if (cmp_result == -1) goto end; if (cmp_result == 1) { - Py_INCREF(Py_None); - Py_INCREF(Py_None); - PyTuple_SET_ITEM(t, 1, Py_None); - PyTuple_SET_ITEM(t, 2, Py_None); + PyTuple_SET_ITEM(t, 1, Py_NewRef(Py_None)); + PyTuple_SET_ITEM(t, 2, Py_NewRef(Py_None)); } else { - Py_INCREF(r->start); - PyTuple_SET_ITEM(t, 1, r->start); + PyTuple_SET_ITEM(t, 1, Py_NewRef(r->start)); cmp_result = PyObject_RichCompareBool(r->length, _PyLong_GetOne(), Py_EQ); if (cmp_result == -1) goto end; if (cmp_result == 1) { - Py_INCREF(Py_None); - PyTuple_SET_ITEM(t, 2, Py_None); + PyTuple_SET_ITEM(t, 2, Py_NewRef(Py_None)); } else { - Py_INCREF(r->step); - PyTuple_SET_ITEM(t, 2, r->step); + PyTuple_SET_ITEM(t, 2, Py_NewRef(r->step)); } } result = PyObject_Hash(t); @@ -766,18 +756,19 @@ PyTypeObject PyRange_Type = { static PyObject * rangeiter_next(_PyRangeIterObject *r) { - if (r->index < r->len) - /* cast to unsigned to avoid possible signed overflow - in intermediate calculations. 
*/ - return PyLong_FromLong((long)(r->start + - (unsigned long)(r->index++) * r->step)); + if (r->len > 0) { + long result = r->start; + r->start = result + r->step; + r->len--; + return PyLong_FromLong(result); + } return NULL; } static PyObject * rangeiter_len(_PyRangeIterObject *r, PyObject *Py_UNUSED(ignored)) { - return PyLong_FromLong(r->len - r->index); + return PyLong_FromLong(r->len); } PyDoc_STRVAR(length_hint_doc, @@ -804,8 +795,8 @@ rangeiter_reduce(_PyRangeIterObject *r, PyObject *Py_UNUSED(ignored)) if (range == NULL) goto err; /* return the result */ - return Py_BuildValue( - "N(N)l", _PyEval_GetBuiltin(&_Py_ID(iter)), range, r->index); + return Py_BuildValue("N(N)O", _PyEval_GetBuiltin(&_Py_ID(iter)), + range, Py_None); err: Py_XDECREF(start); Py_XDECREF(stop); @@ -824,7 +815,8 @@ rangeiter_setstate(_PyRangeIterObject *r, PyObject *state) index = 0; else if (index > r->len) index = r->len; /* exhausted iterator */ - r->index = index; + r->start += index * r->step; + r->len -= index; Py_RETURN_NONE; } @@ -914,13 +906,11 @@ fast_range_iter(long start, long stop, long step, long len) it->start = start; it->step = step; it->len = len; - it->index = 0; return (PyObject *)it; } typedef struct { PyObject_HEAD - PyObject *index; PyObject *start; PyObject *step; PyObject *len; @@ -929,7 +919,8 @@ typedef struct { static PyObject * longrangeiter_len(longrangeiterobject *r, PyObject *no_args) { - return PyNumber_Subtract(r->len, r->index); + Py_INCREF(r->len); + return r->len; } static PyObject * @@ -946,10 +937,8 @@ longrangeiter_reduce(longrangeiterobject *r, PyObject *Py_UNUSED(ignored)) Py_DECREF(product); if (stop == NULL) return NULL; - Py_INCREF(r->start); - Py_INCREF(r->step); range = (PyObject*)make_range_object(&PyRange_Type, - r->start, stop, r->step); + Py_NewRef(r->start), stop, Py_NewRef(r->step)); if (range == NULL) { Py_DECREF(r->start); Py_DECREF(stop); @@ -958,8 +947,8 @@ longrangeiter_reduce(longrangeiterobject *r, PyObject *Py_UNUSED(ignored)) } /* return the result */ - return Py_BuildValue( - "N(N)O", _PyEval_GetBuiltin(&_Py_ID(iter)), range, r->index); + return Py_BuildValue("N(N)O", _PyEval_GetBuiltin(&_Py_ID(iter)), + range, Py_None); } static PyObject * @@ -982,8 +971,22 @@ longrangeiter_setstate(longrangeiterobject *r, PyObject *state) if (cmp > 0) state = r->len; } - Py_INCREF(state); - Py_XSETREF(r->index, state); + PyObject *product = PyNumber_Multiply(state, r->step); + if (product == NULL) + return NULL; + PyObject *new_start = PyNumber_Add(r->start, product); + Py_DECREF(product); + if (new_start == NULL) + return NULL; + PyObject *new_len = PyNumber_Subtract(r->len, state); + if (new_len == NULL) { + Py_DECREF(new_start); + return NULL; + } + PyObject *tmp = r->start; + r->start = new_start; + Py_SETREF(r->len, new_len); + Py_DECREF(tmp); Py_RETURN_NONE; } @@ -1000,7 +1003,6 @@ static PyMethodDef longrangeiter_methods[] = { static void longrangeiter_dealloc(longrangeiterobject *r) { - Py_XDECREF(r->index); Py_XDECREF(r->start); Py_XDECREF(r->step); Py_XDECREF(r->len); @@ -1010,29 +1012,21 @@ longrangeiter_dealloc(longrangeiterobject *r) static PyObject * longrangeiter_next(longrangeiterobject *r) { - PyObject *product, *new_index, *result; - if (PyObject_RichCompareBool(r->index, r->len, Py_LT) != 1) + if (PyObject_RichCompareBool(r->len, _PyLong_GetZero(), Py_GT) != 1) return NULL; - new_index = PyNumber_Add(r->index, _PyLong_GetOne()); - if (!new_index) + PyObject *new_start = PyNumber_Add(r->start, r->step); + if (new_start == NULL) { return NULL; 
- - product = PyNumber_Multiply(r->index, r->step); - if (!product) { - Py_DECREF(new_index); - return NULL; - } - - result = PyNumber_Add(r->start, product); - Py_DECREF(product); - if (result) { - Py_SETREF(r->index, new_index); } - else { - Py_DECREF(new_index); + PyObject *new_len = PyNumber_Subtract(r->len, _PyLong_GetOne()); + if (new_len == NULL) { + Py_DECREF(new_start); + return NULL; } - + PyObject *result = r->start; + r->start = new_start; + Py_SETREF(r->len, new_len); return result; } @@ -1118,14 +1112,9 @@ range_iter(PyObject *seq) if (it == NULL) return NULL; - it->start = r->start; - it->step = r->step; - it->len = r->length; - it->index = _PyLong_GetZero(); - Py_INCREF(it->start); - Py_INCREF(it->step); - Py_INCREF(it->len); - Py_INCREF(it->index); + it->start = Py_NewRef(r->start); + it->step = Py_NewRef(r->step); + it->len = Py_NewRef(r->length); return (PyObject *)it; } @@ -1203,11 +1192,10 @@ range_reverse(PyObject *seq, PyObject *Py_UNUSED(ignored)) it = PyObject_New(longrangeiterobject, &PyLongRangeIter_Type); if (it == NULL) return NULL; - it->index = it->start = it->step = NULL; + it->start = it->step = NULL; /* start + (len - 1) * step */ - it->len = range->length; - Py_INCREF(it->len); + it->len = Py_NewRef(range->length); diff = PyNumber_Subtract(it->len, _PyLong_GetOne()); if (!diff) @@ -1228,8 +1216,6 @@ range_reverse(PyObject *seq, PyObject *Py_UNUSED(ignored)) if (!it->step) goto create_failure; - it->index = _PyLong_GetZero(); - Py_INCREF(it->index); return (PyObject *)it; create_failure: diff --git a/Objects/setobject.c b/Objects/setobject.c index dd55a943010ab8..fcdda2a0bca2b6 100644 --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -588,8 +588,7 @@ set_merge(PySetObject *so, PyObject *otherset) key = other_entry->key; if (key != NULL) { assert(so_entry->key == NULL); - Py_INCREF(key); - so_entry->key = key; + so_entry->key = Py_NewRef(key); so_entry->hash = other_entry->hash; } } @@ -607,8 +606,8 @@ set_merge(PySetObject *so, PyObject *otherset) for (i = other->mask + 1; i > 0 ; i--, other_entry++) { key = other_entry->key; if (key != NULL && key != dummy) { - Py_INCREF(key); - set_insert_clean(newtable, newmask, key, other_entry->hash); + set_insert_clean(newtable, newmask, Py_NewRef(key), + other_entry->hash); } } return 0; @@ -820,8 +819,7 @@ static PyObject *setiter_iternext(setiterobject *si) goto fail; si->len--; key = entry[i].key; - Py_INCREF(key); - return key; + return Py_NewRef(key); fail: si->si_set = NULL; @@ -868,8 +866,7 @@ set_iter(PySetObject *so) setiterobject *si = PyObject_GC_New(setiterobject, &PySetIter_Type); if (si == NULL) return NULL; - Py_INCREF(so); - si->si_set = so; + si->si_set = (PySetObject*)Py_NewRef(so); si->si_used = so->used; si->si_pos = 0; si->len = so->used; @@ -997,8 +994,7 @@ make_new_frozenset(PyTypeObject *type, PyObject *iterable) if (iterable != NULL && PyFrozenSet_CheckExact(iterable)) { /* frozenset(f) is idempotent */ - Py_INCREF(iterable); - return iterable; + return Py_NewRef(iterable); } return make_new_set(type, iterable); } @@ -1100,8 +1096,7 @@ static PyObject * frozenset_copy(PySetObject *so, PyObject *Py_UNUSED(ignored)) { if (PyFrozenSet_CheckExact(so)) { - Py_INCREF(so); - return (PyObject *)so; + return Py_NewRef(so); } return set_copy(so, NULL); } @@ -1173,8 +1168,7 @@ set_ior(PySetObject *so, PyObject *other) if (set_update_internal(so, other)) return NULL; - Py_INCREF(so); - return (PyObject *)so; + return Py_NewRef(so); } static PyObject * @@ -1264,12 +1258,11 @@ static PyObject * 
set_intersection_multi(PySetObject *so, PyObject *args) { Py_ssize_t i; - PyObject *result = (PyObject *)so; if (PyTuple_GET_SIZE(args) == 0) return set_copy(so, NULL); - Py_INCREF(so); + PyObject *result = Py_NewRef(so); for (i=0 ; i<PyTuple_GET_SIZE(args) ; i++) { PyObject *other = PyTuple_GET_ITEM(args, i); PyObject *newresult = set_intersection((PySetObject *)result, other); @@ -1277,8 +1270,7 @@ set_intersection_multi(PySetObject *so, PyObject *args) Py_DECREF(result); return NULL; } - Py_DECREF(result); - result = newresult; + Py_SETREF(result, newresult); } return result; } @@ -1336,8 +1328,7 @@ set_iand(PySetObject *so, PyObject *other) if (result == NULL) return NULL; Py_DECREF(result); - Py_INCREF(so); - return (PyObject *)so; + return Py_NewRef(so); } static PyObject * @@ -1609,8 +1600,7 @@ set_isub(PySetObject *so, PyObject *other) Py_RETURN_NOTIMPLEMENTED; if (set_difference_update_internal(so, other)) return NULL; - Py_INCREF(so); - return (PyObject *)so; + return Py_NewRef(so); } static PyObject * @@ -1647,8 +1637,7 @@ set_symmetric_difference_update(PySetObject *so, PyObject *other) } if (PyAnySet_Check(other)) { - Py_INCREF(other); - otherset = (PySetObject *)other; + otherset = (PySetObject *)Py_NewRef(other); } else { otherset = (PySetObject *)make_new_set_basetype(Py_TYPE(so), other); if (otherset == NULL) @@ -1723,8 +1712,7 @@ set_ixor(PySetObject *so, PyObject *other) if (result == NULL) return NULL; Py_DECREF(result); - Py_INCREF(so); - return (PyObject *)so; + return Py_NewRef(so); } static PyObject * @@ -1969,12 +1957,11 @@ set_reduce(PySetObject *so, PyObject *Py_UNUSED(ignored)) static PyObject * set_sizeof(PySetObject *so, PyObject *Py_UNUSED(ignored)) { - Py_ssize_t res; - - res = _PyObject_SIZE(Py_TYPE(so)); - if (so->table != so->smalltable) - res = res + (so->mask + 1) * sizeof(setentry); - return PyLong_FromSsize_t(res); + size_t res = _PyObject_SIZE(Py_TYPE(so)); + if (so->table != so->smalltable) { + res += ((size_t)so->mask + 1) * sizeof(setentry); + } + return PyLong_FromSize_t(res); } PyDoc_STRVAR(sizeof_doc, "S.__sizeof__() -> size of S in memory, in bytes"); diff --git a/Objects/sliceobject.c b/Objects/sliceobject.c index e37623f38ba1f3..5694bd9c661fa5 100644 --- a/Objects/sliceobject.c +++ b/Objects/sliceobject.c @@ -26,8 +26,7 @@ ellipsis_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) PyErr_SetString(PyExc_TypeError, "EllipsisType takes no arguments"); return NULL; } - Py_INCREF(Py_Ellipsis); - return Py_Ellipsis; + return Py_NewRef(Py_Ellipsis); } static PyObject * @@ -153,9 +152,8 @@ PySlice_New(PyObject *start, PyObject *stop, PyObject *step) if (stop == NULL) { stop = Py_None; } - Py_INCREF(start); - Py_INCREF(stop); - return (PyObject *) _PyBuildSlice_Consume2(start, stop, step); + return (PyObject *)_PyBuildSlice_Consume2(Py_NewRef(start), + Py_NewRef(stop), step); } PyObject * @@ -406,8 +404,7 @@ _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, /* Convert step to an integer; raise for zero step. */ if (self->step == Py_None) { - step = _PyLong_GetOne(); - Py_INCREF(step); + step = Py_NewRef(_PyLong_GetOne()); step_is_negative = 0; } else { @@ -435,16 +432,13 @@ _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, goto error; } else { - lower = _PyLong_GetZero(); - Py_INCREF(lower); - upper = length; - Py_INCREF(upper); + lower = Py_NewRef(_PyLong_GetZero()); + upper = Py_NewRef(length); } /* Compute start. */ if (self->start == Py_None) { - start = step_is_negative ? 
upper : lower; - Py_INCREF(start); + start = Py_NewRef(step_is_negative ? upper : lower); } else { start = evaluate_slice_index(self->start); @@ -454,8 +448,7 @@ _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, if (_PyLong_Sign(start) < 0) { /* start += length */ PyObject *tmp = PyNumber_Add(start, length); - Py_DECREF(start); - start = tmp; + Py_SETREF(start, tmp); if (start == NULL) goto error; @@ -463,9 +456,7 @@ _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, if (cmp_result < 0) goto error; if (cmp_result) { - Py_INCREF(lower); - Py_DECREF(start); - start = lower; + Py_SETREF(start, Py_NewRef(lower)); } } else { @@ -473,17 +464,14 @@ _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, if (cmp_result < 0) goto error; if (cmp_result) { - Py_INCREF(upper); - Py_DECREF(start); - start = upper; + Py_SETREF(start, Py_NewRef(upper)); } } } /* Compute stop. */ if (self->stop == Py_None) { - stop = step_is_negative ? lower : upper; - Py_INCREF(stop); + stop = Py_NewRef(step_is_negative ? lower : upper); } else { stop = evaluate_slice_index(self->stop); @@ -493,8 +481,7 @@ _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, if (_PyLong_Sign(stop) < 0) { /* stop += length */ PyObject *tmp = PyNumber_Add(stop, length); - Py_DECREF(stop); - stop = tmp; + Py_SETREF(stop, tmp); if (stop == NULL) goto error; @@ -502,9 +489,7 @@ _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, if (cmp_result < 0) goto error; if (cmp_result) { - Py_INCREF(lower); - Py_DECREF(stop); - stop = lower; + Py_SETREF(stop, Py_NewRef(lower)); } } else { @@ -512,9 +497,7 @@ _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, if (cmp_result < 0) goto error; if (cmp_result) { - Py_INCREF(upper); - Py_DECREF(stop); - stop = upper; + Py_SETREF(stop, Py_NewRef(upper)); } } } @@ -609,8 +592,7 @@ slice_richcompare(PyObject *v, PyObject *w, int op) res = Py_False; break; } - Py_INCREF(res); - return res; + return Py_NewRef(res); } diff --git a/Objects/stringlib/join.h b/Objects/stringlib/join.h index bb011f7db796d8..de6bd83ffe4c8b 100644 --- a/Objects/stringlib/join.h +++ b/Objects/stringlib/join.h @@ -63,8 +63,7 @@ STRINGLIB(bytes_join)(PyObject *sep, PyObject *iterable) item = PySequence_Fast_GET_ITEM(seq, i); if (PyBytes_CheckExact(item)) { /* Fast path. 
*/ - Py_INCREF(item); - buffers[i].obj = item; + buffers[i].obj = Py_NewRef(item); buffers[i].buf = PyBytes_AS_STRING(item); buffers[i].len = PyBytes_GET_SIZE(item); } diff --git a/Objects/stringlib/transmogrify.h b/Objects/stringlib/transmogrify.h index e1165ea38e8256..71099bb586e809 100644 --- a/Objects/stringlib/transmogrify.h +++ b/Objects/stringlib/transmogrify.h @@ -17,8 +17,7 @@ return_self(PyObject *self) { #if !STRINGLIB_MUTABLE if (STRINGLIB_CHECK_EXACT(self)) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } #endif return STRINGLIB_NEW(STRINGLIB_STR(self), STRINGLIB_LEN(self)); diff --git a/Objects/stringlib/unicode_format.h b/Objects/stringlib/unicode_format.h index a4eea7b91988b9..ccd7c77c0a03fd 100644 --- a/Objects/stringlib/unicode_format.h +++ b/Objects/stringlib/unicode_format.h @@ -473,8 +473,7 @@ get_field_object(SubString *input, PyObject *args, PyObject *kwargs, goto error; /* assign to obj */ - Py_DECREF(obj); - obj = tmp; + Py_SETREF(obj, tmp); } /* end of iterator, this is the non-error case */ if (ok == 1) @@ -825,8 +824,7 @@ output_markup(SubString *field_name, SubString *format_spec, goto done; /* do the assignment, transferring ownership: fieldobj = tmp */ - Py_DECREF(fieldobj); - fieldobj = tmp; + Py_SETREF(fieldobj, tmp); tmp = NULL; } @@ -1042,8 +1040,7 @@ formatteriter_next(formatteriterobject *it) otherwise create a one length string with the conversion character */ if (conversion == '\0') { - conversion_str = Py_None; - Py_INCREF(conversion_str); + conversion_str = Py_NewRef(Py_None); } else conversion_str = PyUnicode_FromKindAndData(PyUnicode_4BYTE_KIND, @@ -1121,8 +1118,7 @@ formatter_parser(PyObject *ignored, PyObject *self) return NULL; /* take ownership, give the object to the iterator */ - Py_INCREF(self); - it->str = self; + it->str = Py_NewRef(self); /* initialize the contained MarkupIterator */ MarkupIterator_init(&it->it_markup, (PyObject*)self, 0, PyUnicode_GET_LENGTH(self)); @@ -1265,8 +1261,7 @@ formatter_field_name_split(PyObject *ignored, PyObject *self) /* take ownership, give the object to the iterator. this is just to keep the field_name alive */ - Py_INCREF(self); - it->str = self; + it->str = Py_NewRef(self); /* Pass in auto_number = NULL. We'll return an empty string for first_obj in that case. */ diff --git a/Objects/structseq.c b/Objects/structseq.c index 9a7013372e688c..100ccfef0a23c4 100644 --- a/Objects/structseq.c +++ b/Objects/structseq.c @@ -200,8 +200,7 @@ structseq_new_impl(PyTypeObject *type, PyObject *arg, PyObject *dict) } for (i = 0; i < len; ++i) { PyObject *v = PySequence_Fast_GET_ITEM(arg, i); - Py_INCREF(v); - res->ob_item[i] = v; + res->ob_item[i] = Py_NewRef(v); } Py_DECREF(arg); for (; i < max_len; ++i) { @@ -219,8 +218,7 @@ structseq_new_impl(PyTypeObject *type, PyObject *arg, PyObject *dict) ob = Py_None; } } - Py_INCREF(ob); - res->ob_item[i] = ob; + res->ob_item[i] = Py_NewRef(ob); } _PyObject_GC_TRACK(res); diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index 240af0a9075271..e1b9953226c0d7 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -61,8 +61,7 @@ tuple_alloc(Py_ssize_t size) static inline PyObject * tuple_get_empty(void) { - Py_INCREF(&_Py_SINGLETON(tuple_empty)); - return (PyObject *)&_Py_SINGLETON(tuple_empty); + return Py_NewRef(&_Py_SINGLETON(tuple_empty)); } PyObject * @@ -171,8 +170,7 @@ PyTuple_Pack(Py_ssize_t n, ...) 
items = result->ob_item; for (i = 0; i < n; i++) { o = va_arg(vargs, PyObject *); - Py_INCREF(o); - items[i] = o; + items[i] = Py_NewRef(o); } va_end(vargs); _PyObject_GC_TRACK(result); @@ -367,8 +365,7 @@ tupleitem(PyTupleObject *a, Py_ssize_t i) PyErr_SetString(PyExc_IndexError, "tuple index out of range"); return NULL; } - Py_INCREF(a->ob_item[i]); - return a->ob_item[i]; + return Py_NewRef(a->ob_item[i]); } PyObject * @@ -385,8 +382,7 @@ _PyTuple_FromArray(PyObject *const *src, Py_ssize_t n) PyObject **dst = tuple->ob_item; for (Py_ssize_t i = 0; i < n; i++) { PyObject *item = src[i]; - Py_INCREF(item); - dst[i] = item; + dst[i] = Py_NewRef(item); } _PyObject_GC_TRACK(tuple); return (PyObject *)tuple; @@ -425,8 +421,7 @@ tupleslice(PyTupleObject *a, Py_ssize_t ilow, if (ihigh < ilow) ihigh = ilow; if (ilow == 0 && ihigh == Py_SIZE(a) && PyTuple_CheckExact(a)) { - Py_INCREF(a); - return (PyObject *)a; + return Py_NewRef(a); } return _PyTuple_FromArray(a->ob_item + ilow, ihigh - ilow); } @@ -449,8 +444,7 @@ tupleconcat(PyTupleObject *a, PyObject *bb) PyObject **src, **dest; PyTupleObject *np; if (Py_SIZE(a) == 0 && PyTuple_CheckExact(bb)) { - Py_INCREF(bb); - return bb; + return Py_NewRef(bb); } if (!PyTuple_Check(bb)) { PyErr_Format(PyExc_TypeError, @@ -461,8 +455,7 @@ tupleconcat(PyTupleObject *a, PyObject *bb) PyTupleObject *b = (PyTupleObject *)bb; if (Py_SIZE(b) == 0 && PyTuple_CheckExact(a)) { - Py_INCREF(a); - return (PyObject *)a; + return Py_NewRef(a); } assert((size_t)Py_SIZE(a) + (size_t)Py_SIZE(b) < PY_SSIZE_T_MAX); size = Py_SIZE(a) + Py_SIZE(b); @@ -478,15 +471,13 @@ tupleconcat(PyTupleObject *a, PyObject *bb) dest = np->ob_item; for (i = 0; i < Py_SIZE(a); i++) { PyObject *v = src[i]; - Py_INCREF(v); - dest[i] = v; + dest[i] = Py_NewRef(v); } src = b->ob_item; dest = np->ob_item + Py_SIZE(a); for (i = 0; i < Py_SIZE(b); i++) { PyObject *v = src[i]; - Py_INCREF(v); - dest[i] = v; + dest[i] = Py_NewRef(v); } _PyObject_GC_TRACK(np); return (PyObject *)np; @@ -500,8 +491,7 @@ tuplerepeat(PyTupleObject *a, Py_ssize_t n) if (PyTuple_CheckExact(a)) { /* Since tuples are immutable, we can return a shared copy in this case */ - Py_INCREF(a); - return (PyObject *)a; + return Py_NewRef(a); } } if (input_size == 0 || n <= 0) { @@ -747,8 +737,7 @@ tuple_subtype_new(PyTypeObject *type, PyObject *iterable) } for (i = 0; i < n; i++) { item = PyTuple_GET_ITEM(tmp, i); - Py_INCREF(item); - PyTuple_SET_ITEM(newobj, i, item); + PyTuple_SET_ITEM(newobj, i, Py_NewRef(item)); } Py_DECREF(tmp); @@ -799,8 +788,7 @@ tuplesubscript(PyTupleObject* self, PyObject* item) else if (start == 0 && step == 1 && slicelength == PyTuple_GET_SIZE(self) && PyTuple_CheckExact(self)) { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } else { PyTupleObject* result = tuple_alloc(slicelength); @@ -810,8 +798,7 @@ tuplesubscript(PyTupleObject* self, PyObject* item) dest = result->ob_item; for (cur = start, i = 0; i < slicelength; cur += step, i++) { - it = src[cur]; - Py_INCREF(it); + it = Py_NewRef(src[cur]); dest[i] = it; } @@ -1008,14 +995,9 @@ _PyTuple_ClearFreeList(PyInterpreterState *interp) /*********************** Tuple Iterator **************************/ -typedef struct { - PyObject_HEAD - Py_ssize_t it_index; - PyTupleObject *it_seq; /* Set to NULL when iterator is exhausted */ -} tupleiterobject; static void -tupleiter_dealloc(tupleiterobject *it) +tupleiter_dealloc(_PyTupleIterObject *it) { _PyObject_GC_UNTRACK(it); Py_XDECREF(it->it_seq); @@ -1023,14 +1005,14 @@ 
tupleiter_dealloc(tupleiterobject *it) } static int -tupleiter_traverse(tupleiterobject *it, visitproc visit, void *arg) +tupleiter_traverse(_PyTupleIterObject *it, visitproc visit, void *arg) { Py_VISIT(it->it_seq); return 0; } static PyObject * -tupleiter_next(tupleiterobject *it) +tupleiter_next(_PyTupleIterObject *it) { PyTupleObject *seq; PyObject *item; @@ -1044,8 +1026,7 @@ tupleiter_next(tupleiterobject *it) if (it->it_index < PyTuple_GET_SIZE(seq)) { item = PyTuple_GET_ITEM(seq, it->it_index); ++it->it_index; - Py_INCREF(item); - return item; + return Py_NewRef(item); } it->it_seq = NULL; @@ -1054,7 +1035,7 @@ tupleiter_next(tupleiterobject *it) } static PyObject * -tupleiter_len(tupleiterobject *it, PyObject *Py_UNUSED(ignored)) +tupleiter_len(_PyTupleIterObject *it, PyObject *Py_UNUSED(ignored)) { Py_ssize_t len = 0; if (it->it_seq) @@ -1065,7 +1046,7 @@ tupleiter_len(tupleiterobject *it, PyObject *Py_UNUSED(ignored)) PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); static PyObject * -tupleiter_reduce(tupleiterobject *it, PyObject *Py_UNUSED(ignored)) +tupleiter_reduce(_PyTupleIterObject *it, PyObject *Py_UNUSED(ignored)) { if (it->it_seq) return Py_BuildValue("N(O)n", _PyEval_GetBuiltin(&_Py_ID(iter)), @@ -1075,7 +1056,7 @@ tupleiter_reduce(tupleiterobject *it, PyObject *Py_UNUSED(ignored)) } static PyObject * -tupleiter_setstate(tupleiterobject *it, PyObject *state) +tupleiter_setstate(_PyTupleIterObject *it, PyObject *state) { Py_ssize_t index = PyLong_AsSsize_t(state); if (index == -1 && PyErr_Occurred()) @@ -1103,7 +1084,7 @@ static PyMethodDef tupleiter_methods[] = { PyTypeObject PyTupleIter_Type = { PyVarObject_HEAD_INIT(&PyType_Type, 0) "tuple_iterator", /* tp_name */ - sizeof(tupleiterobject), /* tp_basicsize */ + sizeof(_PyTupleIterObject), /* tp_basicsize */ 0, /* tp_itemsize */ /* methods */ (destructor)tupleiter_dealloc, /* tp_dealloc */ @@ -1136,18 +1117,17 @@ PyTypeObject PyTupleIter_Type = { static PyObject * tuple_iter(PyObject *seq) { - tupleiterobject *it; + _PyTupleIterObject *it; if (!PyTuple_Check(seq)) { PyErr_BadInternalCall(); return NULL; } - it = PyObject_GC_New(tupleiterobject, &PyTupleIter_Type); + it = PyObject_GC_New(_PyTupleIterObject, &PyTupleIter_Type); if (it == NULL) return NULL; it->it_index = 0; - Py_INCREF(seq); - it->it_seq = (PyTupleObject *)seq; + it->it_seq = (PyTupleObject *)Py_NewRef(seq); _PyObject_GC_TRACK(it); return (PyObject *)it; } diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 7f8f2c7846eb01..a96f993e99dd6d 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -4,6 +4,7 @@ #include "pycore_call.h" #include "pycore_code.h" // CO_FAST_FREE #include "pycore_compile.h" // _Py_Mangle() +#include "pycore_dict.h" // _PyDict_KeysSize() #include "pycore_initconfig.h" // _PyStatus_OK() #include "pycore_moduleobject.h" // _PyModule_GetDef() #include "pycore_object.h" // _PyType_HasFeature() @@ -43,9 +44,7 @@ class object "PyObject *" "&PyBaseObject_Type" PyUnicode_IS_READY(name) && \ (PyUnicode_GET_LENGTH(name) <= MCACHE_MAX_ATTR_SIZE) -// bpo-42745: next_version_tag remains shared by all interpreters because of static types -// Used to set PyTypeObject.tp_version_tag -static unsigned int next_version_tag = 1; +#define next_version_tag (_PyRuntime.types.next_version_tag) typedef struct PySlot_Offset { short subslot_offset; @@ -328,18 +327,6 @@ static unsigned int _PyType_ClearCache(PyInterpreterState *interp) { struct type_cache *cache = &interp->types.type_cache; -#if 
MCACHE_STATS - size_t total = cache->hits + cache->collisions + cache->misses; - fprintf(stderr, "-- Method cache hits = %zd (%d%%)\n", - cache->hits, (int) (100.0 * cache->hits / total)); - fprintf(stderr, "-- Method cache true misses = %zd (%d%%)\n", - cache->misses, (int) (100.0 * cache->misses / total)); - fprintf(stderr, "-- Method cache collisions = %zd (%d%%)\n", - cache->collisions, (int) (100.0 * cache->collisions / total)); - fprintf(stderr, "-- Method cache size = %zd KiB\n", - sizeof(cache->hashtable) / 1024); -#endif - // Set to None, rather than NULL, so _PyType_Lookup() can // use Py_SETREF() rather than using slower Py_XSETREF(). type_cache_clear(cache, Py_None); @@ -486,23 +473,24 @@ PyType_Modified(PyTypeObject *type) } } + // Notify registered type watchers, if any if (type->tp_watched) { PyInterpreterState *interp = _PyInterpreterState_GET(); int bits = type->tp_watched; int i = 0; - while(bits && i < TYPE_MAX_WATCHERS) { + while (bits) { + assert(i < TYPE_MAX_WATCHERS); if (bits & 1) { PyType_WatchCallback cb = interp->type_watchers[i]; if (cb && (cb(type) < 0)) { PyErr_WriteUnraisable((PyObject *)type); } } - i += 1; + i++; bits >>= 1; } } - type->tp_flags &= ~Py_TPFLAGS_VALID_VERSION_TAG; type->tp_version_tag = 0; /* 0 is not a valid version tag */ } @@ -653,8 +641,7 @@ type_name(PyTypeObject *type, void *context) if (type->tp_flags & Py_TPFLAGS_HEAPTYPE) { PyHeapTypeObject* et = (PyHeapTypeObject*)type; - Py_INCREF(et->ht_name); - return et->ht_name; + return Py_NewRef(et->ht_name); } else { return PyUnicode_FromString(_PyType_Name(type)); @@ -666,8 +653,7 @@ type_qualname(PyTypeObject *type, void *context) { if (type->tp_flags & Py_TPFLAGS_HEAPTYPE) { PyHeapTypeObject* et = (PyHeapTypeObject*)type; - Py_INCREF(et->ht_qualname); - return et->ht_qualname; + return Py_NewRef(et->ht_qualname); } else { return PyUnicode_FromString(_PyType_Name(type)); @@ -699,8 +685,7 @@ type_set_name(PyTypeObject *type, PyObject *value, void *context) } type->tp_name = tp_name; - Py_INCREF(value); - Py_SETREF(((PyHeapTypeObject*)type)->ht_name, value); + Py_SETREF(((PyHeapTypeObject*)type)->ht_name, Py_NewRef(value)); return 0; } @@ -720,8 +705,7 @@ type_set_qualname(PyTypeObject *type, PyObject *value, void *context) } et = (PyHeapTypeObject*)type; - Py_INCREF(value); - Py_SETREF(et->ht_qualname, value); + Py_SETREF(et->ht_qualname, Py_NewRef(value)); return 0; } @@ -749,8 +733,7 @@ type_module(PyTypeObject *type, void *context) PyUnicode_InternInPlace(&mod); } else { - mod = &_Py_ID(builtins); - Py_INCREF(mod); + mod = Py_NewRef(&_Py_ID(builtins)); } } return mod; @@ -782,8 +765,7 @@ type_abstractmethods(PyTypeObject *type, void *context) } return NULL; } - Py_INCREF(mod); - return mod; + return Py_NewRef(mod); } static int @@ -821,8 +803,7 @@ type_set_abstractmethods(PyTypeObject *type, PyObject *value, void *context) static PyObject * type_get_bases(PyTypeObject *type, void *context) { - Py_INCREF(type->tp_bases); - return type->tp_bases; + return Py_NewRef(type->tp_bases); } static PyTypeObject *best_base(PyObject *); @@ -1016,8 +997,7 @@ type_set_bases(PyTypeObject *type, PyObject *new_bases, void *context) "", 2, 3, &cls, &new_mro, &old_mro); /* Do not rollback if cls has a newer version of MRO. 
*/ if (cls->tp_mro == new_mro) { - Py_XINCREF(old_mro); - cls->tp_mro = old_mro; + cls->tp_mro = Py_XNewRef(old_mro); Py_DECREF(new_mro); } } @@ -1061,8 +1041,7 @@ type_get_doc(PyTypeObject *type, void *context) result = PyDict_GetItemWithError(type->tp_dict, &_Py_ID(__doc__)); if (result == NULL) { if (!PyErr_Occurred()) { - result = Py_None; - Py_INCREF(result); + result = Py_NewRef(Py_None); } } else if (Py_TYPE(result)->tp_descr_get) { @@ -1223,8 +1202,7 @@ type_repr(PyTypeObject *type) if (mod == NULL) PyErr_Clear(); else if (!PyUnicode_Check(mod)) { - Py_DECREF(mod); - mod = NULL; + Py_SETREF(mod, NULL); } name = type_qualname(type, NULL); if (name == NULL) { @@ -1264,8 +1242,7 @@ type_call(PyTypeObject *type, PyObject *args, PyObject *kwds) if (nargs == 1 && (kwds == NULL || !PyDict_GET_SIZE(kwds))) { obj = (PyObject *) Py_TYPE(PyTuple_GET_ITEM(args, 0)); - Py_INCREF(obj); - return obj; + return Py_NewRef(obj); } /* SF bug 475327 -- if that didn't trigger, we need 3 @@ -1299,8 +1276,7 @@ type_call(PyTypeObject *type, PyObject *args, PyObject *kwds) int res = type->tp_init(obj, args, kwds); if (res < 0) { assert(_PyErr_Occurred(tstate)); - Py_DECREF(obj); - obj = NULL; + Py_SETREF(obj, NULL); } else { assert(!_PyErr_Occurred(tstate)); @@ -2144,12 +2120,11 @@ mro_implementation(PyTypeObject *type) return NULL; } - Py_INCREF(type); - PyTuple_SET_ITEM(result, 0, (PyObject *) type); + ; + PyTuple_SET_ITEM(result, 0, Py_NewRef(type)); for (Py_ssize_t i = 0; i < k; i++) { PyObject *cls = PyTuple_GET_ITEM(base->tp_mro, i); - Py_INCREF(cls); - PyTuple_SET_ITEM(result, i + 1, cls); + PyTuple_SET_ITEM(result, i + 1, Py_NewRef(cls)); } return result; } @@ -2185,8 +2160,7 @@ mro_implementation(PyTypeObject *type) return NULL; } - Py_INCREF(type); - PyList_SET_ITEM(result, 0, (PyObject *)type); + PyList_SET_ITEM(result, 0, Py_NewRef(type)); if (pmerge(result, to_merge, n + 1) < 0) { Py_CLEAR(result); } @@ -2331,8 +2305,7 @@ mro_internal(PyTypeObject *type, PyObject **p_old_mro) /* Keep a reference to be able to do a reentrancy check below. Don't let old_mro be GC'ed and its address be reused for another object, like (suddenly!) a new tp_mro. */ - old_mro = type->tp_mro; - Py_XINCREF(old_mro); + old_mro = Py_XNewRef(type->tp_mro); new_mro = mro_invoke(type); /* might cause reentrance */ reent = (type->tp_mro != old_mro); Py_XDECREF(old_mro); @@ -2550,8 +2523,7 @@ subtype_setdict(PyObject *obj, PyObject *value, void *context) "not a '%.200s'", Py_TYPE(value)->tp_name); return -1; } - Py_XINCREF(value); - Py_XSETREF(*dictptr, value); + Py_XSETREF(*dictptr, Py_XNewRef(value)); return 0; } @@ -2578,8 +2550,7 @@ subtype_getweakref(PyObject *obj, void *context) result = Py_None; else result = *weaklistptr; - Py_INCREF(result); - return result; + return Py_NewRef(result); } /* Three variants on the subtype_getsets list. */ @@ -4165,6 +4136,24 @@ find_name_in_mro(PyTypeObject *type, PyObject *name, int *error) return res; } +/* Check if the "readied" PyUnicode name + is a double-underscore special name. */ +static int +is_dunder_name(PyObject *name) +{ + Py_ssize_t length = PyUnicode_GET_LENGTH(name); + int kind = PyUnicode_KIND(name); + /* Special names contain at least "__x__" and are always ASCII. 
*/ + if (length > 4 && kind == PyUnicode_1BYTE_KIND) { + const Py_UCS1 *characters = PyUnicode_1BYTE_DATA(name); + return ( + ((characters[length-2] == '_') && (characters[length-1] == '_')) && + ((characters[0] == '_') && (characters[1] == '_')) + ); + } + return 0; +} + /* Internal API to look for a name through the MRO. This returns a borrowed reference, and doesn't set an exception! */ PyObject * @@ -4178,12 +4167,13 @@ _PyType_Lookup(PyTypeObject *type, PyObject *name) struct type_cache_entry *entry = &cache->hashtable[h]; if (entry->version == type->tp_version_tag && entry->name == name) { -#if MCACHE_STATS - cache->hits++; -#endif assert(_PyType_HasFeature(type, Py_TPFLAGS_VALID_VERSION_TAG)); + OBJECT_STAT_INC_COND(type_cache_hits, !is_dunder_name(name)); + OBJECT_STAT_INC_COND(type_cache_dunder_hits, is_dunder_name(name)); return entry->value; } + OBJECT_STAT_INC_COND(type_cache_misses, !is_dunder_name(name)); + OBJECT_STAT_INC_COND(type_cache_dunder_misses, is_dunder_name(name)); /* We may end up clearing live exceptions below, so make sure it's ours. */ assert(!PyErr_Occurred()); @@ -4211,14 +4201,7 @@ _PyType_Lookup(PyTypeObject *type, PyObject *name) entry->version = type->tp_version_tag; entry->value = res; /* borrowed */ assert(_PyASCIIObject_CAST(name)->hash != -1); -#if MCACHE_STATS - if (entry->name != Py_None && entry->name != name) { - cache->collisions++; - } - else { - cache->misses++; - } -#endif + OBJECT_STAT_INC_COND(type_cache_collisions, entry->name != Py_None && entry->name != name); assert(_PyType_HasFeature(type, Py_TPFLAGS_VALID_VERSION_TAG)); Py_SETREF(entry->name, Py_NewRef(name)); } @@ -4235,24 +4218,6 @@ _PyType_LookupId(PyTypeObject *type, _Py_Identifier *name) return _PyType_Lookup(type, oname); } -/* Check if the "readied" PyUnicode name - is a double-underscore special name. */ -static int -is_dunder_name(PyObject *name) -{ - Py_ssize_t length = PyUnicode_GET_LENGTH(name); - int kind = PyUnicode_KIND(name); - /* Special names contain at least "__x__" and are always ASCII. */ - if (length > 4 && kind == PyUnicode_1BYTE_KIND) { - const Py_UCS1 *characters = PyUnicode_1BYTE_DATA(name); - return ( - ((characters[length-2] == '_') && (characters[length-1] == '_')) && - ((characters[0] == '_') && (characters[1] == '_')) - ); - } - return 0; -} - /* This is similar to PyObject_GenericGetAttr(), but uses _PyType_Lookup() instead of just looking in type->tp_dict. */ static PyObject * @@ -4698,16 +4663,17 @@ static PyObject * type___sizeof___impl(PyTypeObject *self) /*[clinic end generated code: output=766f4f16cd3b1854 input=99398f24b9cf45d6]*/ { - Py_ssize_t size; + size_t size; if (self->tp_flags & Py_TPFLAGS_HEAPTYPE) { PyHeapTypeObject* et = (PyHeapTypeObject*)self; size = sizeof(PyHeapTypeObject); if (et->ht_cached_keys) size += _PyDict_KeysSize(et->ht_cached_keys); } - else + else { size = sizeof(PyTypeObject); - return PyLong_FromSsize_t(size); + } + return PyLong_FromSize_t(size); } static PyMethodDef type_methods[] = { @@ -4955,9 +4921,10 @@ object_new(PyTypeObject *type, PyObject *args, PyObject *kwds) PyObject *abstract_methods; PyObject *sorted_methods; PyObject *joined; + PyObject* comma_w_quotes_sep; Py_ssize_t method_count; - /* Compute ", ".join(sorted(type.__abstractmethods__)) + /* Compute "', '".join(sorted(type.__abstractmethods__)) into joined. 
*/ abstract_methods = type_abstractmethods(type, NULL); if (abstract_methods == NULL) @@ -4970,22 +4937,28 @@ object_new(PyTypeObject *type, PyObject *args, PyObject *kwds) Py_DECREF(sorted_methods); return NULL; } - _Py_DECLARE_STR(comma_sep, ", "); - joined = PyUnicode_Join(&_Py_STR(comma_sep), sorted_methods); + comma_w_quotes_sep = PyUnicode_FromString("', '"); + joined = PyUnicode_Join(comma_w_quotes_sep, sorted_methods); method_count = PyObject_Length(sorted_methods); Py_DECREF(sorted_methods); - if (joined == NULL) + if (joined == NULL) { + Py_DECREF(comma_w_quotes_sep); return NULL; - if (method_count == -1) + } + if (method_count == -1) { + Py_DECREF(comma_w_quotes_sep); + Py_DECREF(joined); return NULL; + } PyErr_Format(PyExc_TypeError, "Can't instantiate abstract class %s " - "without an implementation for abstract method%s %U", + "without an implementation for abstract method%s '%U'", type->tp_name, method_count > 1 ? "s" : "", joined); Py_DECREF(joined); + Py_DECREF(comma_w_quotes_sep); return NULL; } PyObject *obj = type->tp_alloc(type, 0); @@ -5016,8 +4989,7 @@ object_repr(PyObject *self) if (mod == NULL) PyErr_Clear(); else if (!PyUnicode_Check(mod)) { - Py_DECREF(mod); - mod = NULL; + Py_SETREF(mod, NULL); } name = type_qualname(type, NULL); if (name == NULL) { @@ -5056,16 +5028,14 @@ object_richcompare(PyObject *self, PyObject *other, int op) /* Return NotImplemented instead of False, so if two objects are compared, both get a chance at the comparison. See issue #1393. */ - res = (self == other) ? Py_True : Py_NotImplemented; - Py_INCREF(res); + res = Py_NewRef((self == other) ? Py_True : Py_NotImplemented); break; case Py_NE: /* By default, __ne__() delegates to __eq__() and inverts the result, unless the latter returns NotImplemented. 
*/ if (Py_TYPE(self)->tp_richcompare == NULL) { - res = Py_NotImplemented; - Py_INCREF(res); + res = Py_NewRef(Py_NotImplemented); break; } res = (*Py_TYPE(self)->tp_richcompare)(self, other, Py_EQ); @@ -5076,17 +5046,15 @@ object_richcompare(PyObject *self, PyObject *other, int op) res = NULL; else { if (ok) - res = Py_False; + res = Py_NewRef(Py_False); else - res = Py_True; - Py_INCREF(res); + res = Py_NewRef(Py_True); } } break; default: - res = Py_NotImplemented; - Py_INCREF(res); + res = Py_NewRef(Py_NotImplemented); break; } @@ -5096,8 +5064,7 @@ object_richcompare(PyObject *self, PyObject *other, int op) static PyObject * object_get_class(PyObject *self, void *closure) { - Py_INCREF(Py_TYPE(self)); - return (PyObject *)(Py_TYPE(self)); + return Py_NewRef(Py_TYPE(self)); } static int @@ -5352,8 +5319,7 @@ _PyType_GetSlotNames(PyTypeObject *cls) cls->tp_name, Py_TYPE(slotnames)->tp_name); return NULL; } - Py_INCREF(slotnames); - return slotnames; + return Py_NewRef(slotnames); } else { if (PyErr_Occurred()) { @@ -5399,8 +5365,7 @@ object_getstate_default(PyObject *obj, int required) } if (_PyObject_IsInstanceDictEmpty(obj)) { - state = Py_None; - Py_INCREF(state); + state = Py_NewRef(Py_None); } else { state = PyObject_GenericGetDict(obj, NULL); @@ -5454,8 +5419,7 @@ object_getstate_default(PyObject *obj, int required) for (i = 0; i < slotnames_size; i++) { PyObject *name, *value; - name = PyList_GET_ITEM(slotnames, i); - Py_INCREF(name); + name = Py_NewRef(PyList_GET_ITEM(slotnames, i)); if (_PyObject_LookupAttr(obj, name, &value) < 0) { Py_DECREF(name); goto error; @@ -5587,10 +5551,8 @@ _PyObject_GetNewArguments(PyObject *obj, PyObject **args, PyObject **kwargs) Py_DECREF(newargs); return -1; } - *args = PyTuple_GET_ITEM(newargs, 0); - Py_INCREF(*args); - *kwargs = PyTuple_GET_ITEM(newargs, 1); - Py_INCREF(*kwargs); + *args = Py_NewRef(PyTuple_GET_ITEM(newargs, 0)); + *kwargs = Py_NewRef(PyTuple_GET_ITEM(newargs, 1)); Py_DECREF(newargs); /* XXX We should perhaps allow None to be passed here. */ @@ -5658,8 +5620,7 @@ _PyObject_GetItemsIter(PyObject *obj, PyObject **listitems, } if (!PyList_Check(obj)) { - *listitems = Py_None; - Py_INCREF(*listitems); + *listitems = Py_NewRef(Py_None); } else { *listitems = PyObject_GetIter(obj); @@ -5668,8 +5629,7 @@ _PyObject_GetItemsIter(PyObject *obj, PyObject **listitems, } if (!PyDict_Check(obj)) { - *dictitems = Py_None; - Py_INCREF(*dictitems); + *dictitems = Py_NewRef(Py_None); } else { PyObject *items = PyObject_CallMethodNoArgs(obj, &_Py_ID(items)); @@ -5734,12 +5694,10 @@ reduce_newobj(PyObject *obj) return NULL; } cls = (PyObject *) Py_TYPE(obj); - Py_INCREF(cls); - PyTuple_SET_ITEM(newargs, 0, cls); + PyTuple_SET_ITEM(newargs, 0, Py_NewRef(cls)); for (i = 0; i < n; i++) { PyObject *v = PyTuple_GET_ITEM(args, i); - Py_INCREF(v); - PyTuple_SET_ITEM(newargs, i+1, v); + PyTuple_SET_ITEM(newargs, i+1, Py_NewRef(v)); } Py_XDECREF(args); } @@ -5847,7 +5805,8 @@ static PyObject * object___reduce_ex___impl(PyObject *self, int protocol) /*[clinic end generated code: output=2e157766f6b50094 input=f326b43fb8a4c5ff]*/ { - static PyObject *objreduce; +#define objreduce \ + (_Py_INTERP_CACHED_OBJECT(_PyInterpreterState_Get(), objreduce)) PyObject *reduce, *res; if (objreduce == NULL) { @@ -5883,6 +5842,7 @@ object___reduce_ex___impl(PyObject *self, int protocol) } return _common_reduce(self, protocol); +#undef objreduce } static PyObject * @@ -5989,8 +5949,7 @@ object___dir___impl(PyObject *self) else { /* Copy __dict__ to avoid mutating it. 
*/ PyObject *temp = PyDict_Copy(dict); - Py_DECREF(dict); - dict = temp; + Py_SETREF(dict, temp); } if (dict == NULL) @@ -8129,8 +8088,7 @@ slot_tp_hash(PyObject *self) func = lookup_maybe_method(self, &_Py_ID(__hash__), &unbound); if (func == Py_None) { - Py_DECREF(func); - func = NULL; + Py_SETREF(func, NULL); } if (func == NULL) { @@ -8374,8 +8332,7 @@ slot_tp_descr_get(PyObject *self, PyObject *obj, PyObject *type) /* Avoid further slowdowns */ if (tp->tp_descr_get == slot_tp_descr_get) tp->tp_descr_get = NULL; - Py_INCREF(self); - return self; + return Py_NewRef(self); } if (obj == NULL) obj = Py_None; @@ -8544,8 +8501,6 @@ __ne__ etc. all map to tp_richcompare) and one name may map to multiple slots an all-zero entry. */ -typedef struct wrapperbase slotdef; - #undef TPSLOT #undef FLSLOT #undef AMSLOT @@ -8594,7 +8549,7 @@ typedef struct wrapperbase slotdef; ETSLOT(NAME, as_number.SLOT, FUNCTION, wrap_binaryfunc_r, \ #NAME "($self, value, /)\n--\n\n" DOC) -static slotdef slotdefs[] = { +static pytype_slotdef slotdefs[] = { TPSLOT(__getattribute__, tp_getattr, NULL, NULL, ""), TPSLOT(__getattr__, tp_getattr, NULL, NULL, ""), TPSLOT(__setattr__, tp_setattr, NULL, NULL, ""), @@ -8770,7 +8725,7 @@ static slotdef slotdefs[] = { SQSLOT(__delitem__, sq_ass_item, slot_sq_ass_item, wrap_sq_delitem, "__delitem__($self, key, /)\n--\n\nDelete self[key]."), SQSLOT(__contains__, sq_contains, slot_sq_contains, wrap_objobjproc, - "__contains__($self, key, /)\n--\n\nReturn key in self."), + "__contains__($self, key, /)\n--\n\nReturn bool(key in self)."), SQSLOT(__iadd__, sq_inplace_concat, NULL, wrap_binaryfunc, "__iadd__($self, value, /)\n--\n\nImplement self+=value."), @@ -8819,12 +8774,6 @@ slotptr(PyTypeObject *type, int ioffset) return (void **)ptr; } -/* Length of array of slotdef pointers used to store slots with the - same __name__. There should be at most MAX_EQUIV-1 slotdef entries with - the same __name__, for any __name__. Since that's a static property, it is - appropriate to declare fixed-size arrays for this. */ -#define MAX_EQUIV 10 - /* Return a slot pointer for a given name, but ONLY if the attribute has exactly one slot function. The name must be an interned string. */ static void ** @@ -8833,9 +8782,10 @@ resolve_slotdups(PyTypeObject *type, PyObject *name) /* XXX Maybe this could be optimized more -- but is it worth it? */ /* pname and ptrs act as a little cache */ - static PyObject *pname; - static slotdef *ptrs[MAX_EQUIV]; - slotdef *p, **pp; + PyInterpreterState *interp = _PyInterpreterState_Get(); +#define pname _Py_INTERP_CACHED_OBJECT(interp, type_slots_pname) +#define ptrs _Py_INTERP_CACHED_OBJECT(interp, type_slots_ptrs) + pytype_slotdef *p, **pp; void **res, **ptr; if (pname != name) { @@ -8862,6 +8812,8 @@ resolve_slotdups(PyTypeObject *type, PyObject *name) res = ptr; } return res; +#undef pname +#undef ptrs } @@ -8919,8 +8871,8 @@ resolve_slotdups(PyTypeObject *type, PyObject *name) * When done, return a pointer to the next slotdef with a different offset, * because that's convenient for fixup_slot_dispatchers(). This function never * sets an exception: if an internal error happens (unlikely), it's ignored. 
*/ -static slotdef * -update_one_slot(PyTypeObject *type, slotdef *p) +static pytype_slotdef * +update_one_slot(PyTypeObject *type, pytype_slotdef *p) { PyObject *descr; PyWrapperDescrObject *d; @@ -9035,7 +8987,7 @@ update_one_slot(PyTypeObject *type, slotdef *p) static int update_slots_callback(PyTypeObject *type, void *data) { - slotdef **pp = (slotdef **)data; + pytype_slotdef **pp = (pytype_slotdef **)data; for (; *pp; pp++) { update_one_slot(type, *pp); } @@ -9046,9 +8998,9 @@ update_slots_callback(PyTypeObject *type, void *data) static int update_slot(PyTypeObject *type, PyObject *name) { - slotdef *ptrs[MAX_EQUIV]; - slotdef *p; - slotdef **pp; + pytype_slotdef *ptrs[MAX_EQUIV]; + pytype_slotdef *p; + pytype_slotdef **pp; int offset; assert(PyUnicode_CheckExact(name)); @@ -9085,7 +9037,7 @@ static void fixup_slot_dispatchers(PyTypeObject *type) { assert(!PyErr_Occurred()); - for (slotdef *p = slotdefs; p->name; ) { + for (pytype_slotdef *p = slotdefs; p->name; ) { p = update_one_slot(type, p); } } @@ -9093,7 +9045,7 @@ fixup_slot_dispatchers(PyTypeObject *type) static void update_all_slots(PyTypeObject* type) { - slotdef *p; + pytype_slotdef *p; /* Clear the VALID_VERSION flag of 'type' and all its subclasses. */ PyType_Modified(type); @@ -9264,7 +9216,7 @@ static int add_operators(PyTypeObject *type) { PyObject *dict = type->tp_dict; - slotdef *p; + pytype_slotdef *p; PyObject *descr; void **ptr; @@ -9404,8 +9356,7 @@ super_getattro(PyObject *self, PyObject *name) (See SF ID #743627) */ (su->obj == (PyObject *)starttype) ? NULL : su->obj, (PyObject *)starttype); - Py_DECREF(res); - res = res2; + Py_SETREF(res, res2); } Py_DECREF(mro); @@ -9444,14 +9395,12 @@ supercheck(PyTypeObject *type, PyObject *obj) /* Check for first bullet above (special case) */ if (PyType_Check(obj) && PyType_IsSubtype((PyTypeObject *)obj, type)) { - Py_INCREF(obj); - return (PyTypeObject *)obj; + return (PyTypeObject *)Py_NewRef(obj); } /* Normal case */ if (PyType_IsSubtype(Py_TYPE(obj), type)) { - Py_INCREF(Py_TYPE(obj)); - return Py_TYPE(obj); + return (PyTypeObject*)Py_NewRef(Py_TYPE(obj)); } else { /* Try the slow way */ @@ -9487,8 +9436,7 @@ super_descr_get(PyObject *self, PyObject *obj, PyObject *type) if (obj == NULL || obj == Py_None || su->obj != NULL) { /* Not binding to an object, or already bound */ - Py_INCREF(self); - return self; + return Py_NewRef(self); } if (!Py_IS_TYPE(su, &PySuper_Type)) /* If su is an instance of a (strict) subclass of super, @@ -9504,10 +9452,8 @@ super_descr_get(PyObject *self, PyObject *obj, PyObject *type) NULL, NULL); if (newobj == NULL) return NULL; - Py_INCREF(su->type); - Py_INCREF(obj); - newobj->type = su->type; - newobj->obj = obj; + newobj->type = (PyTypeObject*)Py_NewRef(su->type); + newobj->obj = Py_NewRef(obj); newobj->obj_type = obj_type; return (PyObject *)newobj; } @@ -9631,8 +9577,7 @@ super_init_impl(PyObject *self, PyTypeObject *type, PyObject *obj) { return -1; Py_INCREF(obj); } - Py_INCREF(type); - Py_XSETREF(su->type, type); + Py_XSETREF(su->type, (PyTypeObject*)Py_NewRef(type)); Py_XSETREF(su->obj, obj); Py_XSETREF(su->obj_type, obj_type); return 0; diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 9dd0c42a0acda2..f0c7aa7707fdb5 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -52,9 +52,9 @@ OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
#include "pycore_pathconfig.h" // _Py_DumpPathConfig() #include "pycore_pylifecycle.h" // _Py_SetFileSystemEncoding() #include "pycore_pystate.h" // _PyInterpreterState_GET() -#include "pycore_runtime_init.h" // _PyUnicode_InitStaticStrings() #include "pycore_ucnhash.h" // _PyUnicode_Name_CAPI #include "pycore_unicodeobject.h" // struct _Py_unicode_state +#include "pycore_unicodeobject_generated.h" // _PyUnicode_InitStaticStrings() #include "stringlib/eq.h" // unicode_eq() #ifdef MS_WINDOWS @@ -221,8 +221,7 @@ static inline PyObject* unicode_get_empty(void) static inline PyObject* unicode_new_empty(void) { PyObject *empty = unicode_get_empty(); - Py_INCREF(empty); - return empty; + return Py_NewRef(empty); } /* This dictionary holds all interned unicode strings. Note that references @@ -234,12 +233,12 @@ static inline PyObject* unicode_new_empty(void) */ static inline PyObject *get_interned_dict(void) { - return _PyRuntime.global_objects.interned; + return _Py_CACHED_OBJECT(interned_strings); } static inline void set_interned_dict(PyObject *dict) { - _PyRuntime.global_objects.interned = dict; + _Py_CACHED_OBJECT(interned_strings) = dict; } #define _Py_RETURN_UNICODE_EMPTY() \ @@ -611,8 +610,7 @@ static PyObject* unicode_result_unchanged(PyObject *unicode) { if (PyUnicode_CheckExact(unicode)) { - Py_INCREF(unicode); - return unicode; + return Py_NewRef(unicode); } else /* Subtype -- return genuine unicode string with the same value. */ @@ -1858,7 +1856,7 @@ _PyUnicode_FromId(_Py_Identifier *id) Py_ssize_t index = _Py_atomic_size_get(&id->index); if (index < 0) { - struct _Py_unicode_runtime_ids *rt_ids = &interp->runtime->unicode_ids; + struct _Py_unicode_runtime_ids *rt_ids = &interp->runtime->unicode_state.ids; PyThread_acquire_lock(rt_ids->lock, WAIT_LOCK); // Check again to detect concurrent access. Another thread can have @@ -2947,8 +2945,7 @@ PyUnicode_FromObject(PyObject *obj) /* XXX Perhaps we should make this API an alias of PyObject_Str() instead ?! 
*/ if (PyUnicode_CheckExact(obj)) { - Py_INCREF(obj); - return obj; + return Py_NewRef(obj); } if (PyUnicode_Check(obj)) { /* For a Unicode subtype that's not a Unicode object, @@ -4533,6 +4530,9 @@ unicode_decode_utf8(const char *s, Py_ssize_t size, } s += ascii_decode(s, end, PyUnicode_1BYTE_DATA(u)); if (s == end) { + if (consumed) { + *consumed = size; + } return u; } @@ -5697,8 +5697,6 @@ PyUnicode_AsUTF16String(PyObject *unicode) /* --- Unicode Escape Codec ----------------------------------------------- */ -static _PyUnicode_Name_CAPI *ucnhash_capi = NULL; - PyObject * _PyUnicode_DecodeUnicodeEscapeInternal(const char *s, Py_ssize_t size, @@ -5711,6 +5709,8 @@ _PyUnicode_DecodeUnicodeEscapeInternal(const char *s, const char *end; PyObject *errorHandler = NULL; PyObject *exc = NULL; + _PyUnicode_Name_CAPI *ucnhash_capi; + PyInterpreterState *interp = _PyInterpreterState_Get(); // so we can remember if we've seen an invalid escape char or not *first_invalid_escape = NULL; @@ -5858,6 +5858,7 @@ _PyUnicode_DecodeUnicodeEscapeInternal(const char *s, /* \N{name} */ case 'N': + ucnhash_capi = interp->unicode.ucnhash_capi; if (ucnhash_capi == NULL) { /* load the unicode data module */ ucnhash_capi = (_PyUnicode_Name_CAPI *)PyCapsule_Import( @@ -5869,6 +5870,7 @@ _PyUnicode_DecodeUnicodeEscapeInternal(const char *s, ); goto onError; } + interp->unicode.ucnhash_capi = ucnhash_capi; } message = "malformed \\N character escape"; @@ -8668,8 +8670,7 @@ _PyUnicode_TransformDecimalAndSpaceToASCII(PyObject *unicode) } if (PyUnicode_IS_ASCII(unicode)) { /* If the string is already ASCII, just return the same string */ - Py_INCREF(unicode); - return unicode; + return Py_NewRef(unicode); } Py_ssize_t len = PyUnicode_GET_LENGTH(unicode); @@ -9413,8 +9414,7 @@ _PyUnicode_JoinArray(PyObject *separator, PyObject *const *items, Py_ssize_t seq if (seqlen == 1) { if (PyUnicode_CheckExact(items[0])) { res = items[0]; - Py_INCREF(res); - return res; + return Py_NewRef(res); } seplen = 0; maxchar = 0; @@ -9733,8 +9733,7 @@ split(PyObject *self, out = PyList_New(1); if (out == NULL) return NULL; - Py_INCREF(self); - PyList_SET_ITEM(out, 0, self); + PyList_SET_ITEM(out, 0, Py_NewRef(self)); return out; } buf1 = PyUnicode_DATA(self); @@ -9826,8 +9825,7 @@ rsplit(PyObject *self, out = PyList_New(1); if (out == NULL) return NULL; - Py_INCREF(self); - PyList_SET_ITEM(out, 0, self); + PyList_SET_ITEM(out, 0, Py_NewRef(self)); return out; } buf1 = PyUnicode_DATA(self); @@ -10746,8 +10744,7 @@ PyUnicode_Append(PyObject **p_left, PyObject *right) PyObject *empty = unicode_get_empty(); // Borrowed reference if (left == empty) { Py_DECREF(left); - Py_INCREF(right); - *p_left = right; + *p_left = Py_NewRef(right); return; } if (right == empty) { @@ -12449,7 +12446,7 @@ unicode_rsplit_impl(PyObject *self, PyObject *sep, Py_ssize_t maxsplit) /*[clinic input] str.splitlines as unicode_splitlines - keepends: bool(accept={int}) = False + keepends: bool = False Return a list of the lines in the string, breaking at line boundaries. @@ -12459,7 +12456,7 @@ true. 
static PyObject * unicode_splitlines_impl(PyObject *self, int keepends) -/*[clinic end generated code: output=f664dcdad153ec40 input=b508e180459bdd8b]*/ +/*[clinic end generated code: output=f664dcdad153ec40 input=ba6ad05ee85d2b55]*/ { return PyUnicode_Splitlines(self, keepends); } @@ -12977,8 +12974,7 @@ _PyUnicodeWriter_WriteStr(_PyUnicodeWriter *writer, PyObject *str) if (writer->buffer == NULL && !writer->overallocate) { assert(_PyUnicode_CheckConsistency(str, 1)); writer->readonly = 1; - Py_INCREF(str); - writer->buffer = str; + writer->buffer = Py_NewRef(str); _PyUnicodeWriter_Update(writer); writer->pos += len; return 0; @@ -13581,8 +13577,7 @@ _PyUnicode_FormatLong(PyObject *val, int alt, int prec, int type) for (i = 0; i < numdigits; i++) *b1++ = *buf++; *b1 = '\0'; - Py_DECREF(result); - result = r1; + Py_SETREF(result, r1); buf = PyBytes_AS_STRING(result); len = numnondigits + prec; } @@ -13599,8 +13594,7 @@ _PyUnicode_FormatLong(PyObject *val, int alt, int prec, int type) || buf != PyUnicode_DATA(result)) { PyObject *unicode; unicode = _PyUnicode_FromASCII(buf, len); - Py_DECREF(result); - result = unicode; + Py_SETREF(result, unicode); } else if (len != PyUnicode_GET_LENGTH(result)) { if (PyUnicode_Resize(&result, len) < 0) @@ -13641,8 +13635,7 @@ mainformatlong(PyObject *v, assert(PyLong_Check(iobj)); } else { - iobj = v; - Py_INCREF(iobj); + iobj = Py_NewRef(v); } if (PyLong_CheckExact(v) @@ -13965,8 +13958,7 @@ unicode_format_arg_format(struct unicode_formatter_t *ctx, } if (PyUnicode_CheckExact(v) && arg->ch == 's') { - *p_str = v; - Py_INCREF(*p_str); + *p_str = Py_NewRef(v); } else { if (arg->ch == 's') @@ -14502,12 +14494,14 @@ PyTypeObject PyUnicode_Type = { /* Initialize the Unicode implementation */ -void -_PyUnicode_InitState(PyInterpreterState *interp) +static void +_init_global_state(void) { - if (!_Py_IsMainInterpreter(interp)) { + static int initialized = 0; + if (initialized) { return; } + initialized = 1; /* initialize the linebreak bloom filter */ const Py_UCS2 linebreak[] = { @@ -14525,6 +14519,15 @@ _PyUnicode_InitState(PyInterpreterState *interp) Py_ARRAY_LENGTH(linebreak)); } +void +_PyUnicode_InitState(PyInterpreterState *interp) +{ + if (!_Py_IsMainInterpreter(interp)) { + return; + } + _init_global_state(); +} + PyStatus _PyUnicode_InitGlobalObjects(PyInterpreterState *interp) @@ -14616,8 +14619,7 @@ PyUnicode_InternInPlace(PyObject **p) } if (t != s) { - Py_INCREF(t); - Py_SETREF(*p, t); + Py_SETREF(*p, Py_NewRef(t)); return; } @@ -14887,8 +14889,7 @@ unicode_iter(PyObject *seq) if (it == NULL) return NULL; it->it_index = 0; - Py_INCREF(seq); - it->it_seq = seq; + it->it_seq = Py_NewRef(seq); _PyObject_GC_TRACK(it); return (PyObject *)it; } @@ -15129,10 +15130,10 @@ _PyUnicode_Fini(PyInterpreterState *interp) assert(get_interned_dict() == NULL); // bpo-47182: force a unicodedata CAPI capsule re-import on // subsequent initialization of main interpreter. 
- ucnhash_capi = NULL; } _PyUnicode_FiniEncodings(&state->fs_codec); + interp->unicode.ucnhash_capi = NULL; unicode_clear_identifiers(state); } diff --git a/Objects/unionobject.c b/Objects/unionobject.c index 5eee27c08fa7e2..f273f7d15ef25f 100644 --- a/Objects/unionobject.c +++ b/Objects/unionobject.c @@ -114,12 +114,10 @@ merge(PyObject **items1, Py_ssize_t size1, } for (; pos < size1; pos++) { PyObject *a = items1[pos]; - Py_INCREF(a); - PyTuple_SET_ITEM(tuple, pos, a); + PyTuple_SET_ITEM(tuple, pos, Py_NewRef(a)); } } - Py_INCREF(arg); - PyTuple_SET_ITEM(tuple, pos, arg); + PyTuple_SET_ITEM(tuple, pos, Py_NewRef(arg)); pos++; } @@ -170,8 +168,7 @@ _Py_union_type_or(PyObject* self, PyObject* other) if (PyErr_Occurred()) { return NULL; } - Py_INCREF(self); - return self; + return Py_NewRef(self); } PyObject *new_union = make_union(tuple); @@ -298,8 +295,7 @@ union_getitem(PyObject *self, PyObject *item) res = make_union(newargs); } else { - res = PyTuple_GET_ITEM(newargs, 0); - Py_INCREF(res); + res = Py_NewRef(PyTuple_GET_ITEM(newargs, 0)); for (Py_ssize_t iarg = 1; iarg < nargs; iarg++) { PyObject *arg = PyTuple_GET_ITEM(newargs, iarg); Py_SETREF(res, PyNumber_Or(res, arg)); @@ -326,8 +322,7 @@ union_parameters(PyObject *self, void *Py_UNUSED(unused)) return NULL; } } - Py_INCREF(alias->parameters); - return alias->parameters; + return Py_NewRef(alias->parameters); } static PyGetSetDef union_properties[] = { @@ -400,9 +395,8 @@ make_union(PyObject *args) return NULL; } - Py_INCREF(args); result->parameters = NULL; - result->args = args; + result->args = Py_NewRef(args); _PyObject_GC_TRACK(result); return (PyObject*)result; } diff --git a/Objects/weakrefobject.c b/Objects/weakrefobject.c index cf89a9231d204d..bd7720e2753307 100644 --- a/Objects/weakrefobject.c +++ b/Objects/weakrefobject.c @@ -311,8 +311,7 @@ weakref___new__(PyTypeObject *type, PyObject *args, PyObject *kwargs) if (callback == NULL && type == &_PyWeakref_RefType) { if (ref != NULL) { /* We can re-use an existing reference. */ - Py_INCREF(ref); - return (PyObject *)ref; + return Py_NewRef(ref); } } /* We have to create a new reference. */ @@ -825,9 +824,7 @@ PyWeakref_NewRef(PyObject *ob, PyObject *callback) during GC. Return that one instead of this one to avoid violating the invariants of the list of weakrefs for ob. */ - Py_DECREF(result); - Py_INCREF(ref); - result = ref; + Py_SETREF(result, (PyWeakReference*)Py_NewRef(ref)); } } else { @@ -890,9 +887,7 @@ PyWeakref_NewProxy(PyObject *ob, PyObject *callback) during GC. Return that one instead of this one to avoid violating the invariants of the list of weakrefs for ob. 
*/ - Py_DECREF(result); - result = proxy; - Py_INCREF(result); + Py_SETREF(result, (PyWeakReference*)Py_NewRef(proxy)); goto skip_insert; } prev = ref; @@ -993,8 +988,7 @@ PyObject_ClearWeakRefs(PyObject *object) PyWeakReference *next = current->wr_next; if (Py_REFCNT((PyObject *)current) > 0) { - Py_INCREF(current); - PyTuple_SET_ITEM(tuple, i * 2, (PyObject *) current); + PyTuple_SET_ITEM(tuple, i * 2, Py_NewRef(current)); PyTuple_SET_ITEM(tuple, i * 2 + 1, current->wr_callback); } else { diff --git a/PC/_msi.c b/PC/_msi.c index 3686b9bb6f2a6d..b104e3c6ef548c 100644 --- a/PC/_msi.c +++ b/PC/_msi.c @@ -172,9 +172,7 @@ static FNFCIGETTEMPFILE(cb_gettempfile) static FNFCISTATUS(cb_status) { if (pv) { - _Py_IDENTIFIER(status); - - PyObject *result = _PyObject_CallMethodId(pv, &PyId_status, "iii", typeStatus, cb1, cb2); + PyObject *result = PyObject_CallMethod(pv, "status", "iii", typeStatus, cb1, cb2); if (result == NULL) return -1; Py_DECREF(result); @@ -185,9 +183,7 @@ static FNFCISTATUS(cb_status) static FNFCIGETNEXTCABINET(cb_getnextcabinet) { if (pv) { - _Py_IDENTIFIER(getnextcabinet); - - PyObject *result = _PyObject_CallMethodId(pv, &PyId_getnextcabinet, "i", pccab->iCab); + PyObject *result = PyObject_CallMethod(pv, "getnextcabinet", "i", pccab->iCab); if (result == NULL) return -1; if (!PyBytes_Check(result)) { @@ -705,8 +701,7 @@ _msi_SummaryInformation_GetProperty_impl(msiobj *self, int field) result = PyBytes_FromStringAndSize(sval, ssize); break; case VT_EMPTY: - Py_INCREF(Py_None); - result = Py_None; + result = Py_NewRef(Py_None); break; default: PyErr_Format(PyExc_NotImplementedError, "result of type %d", type); diff --git a/PC/launcher2.c b/PC/launcher2.c index 5bcd2ba8a06778..9b3db04aa48b72 100644 --- a/PC/launcher2.c +++ b/PC/launcher2.c @@ -491,62 +491,39 @@ dumpSearchInfo(SearchInfo *search) int -findArgumentLength(const wchar_t *buffer, int bufferLength) +findArgv0Length(const wchar_t *buffer, int bufferLength) { - if (bufferLength < 0) { - bufferLength = (int)wcsnlen_s(buffer, MAXLEN); - } - if (bufferLength == 0) { - return 0; - } - const wchar_t *end; - int i; - - if (buffer[0] != L'"') { - end = wcschr(buffer, L' '); - if (!end) { - return bufferLength; - } - i = (int)(end - buffer); - return i < bufferLength ? i : bufferLength; - } - - i = 0; - while (i < bufferLength) { - end = wcschr(&buffer[i + 1], L'"'); - if (!end) { - return bufferLength; - } - - i = (int)(end - buffer); - if (i >= bufferLength) { - return bufferLength; - } - - int j = i; - while (j > 1 && buffer[--j] == L'\\') { - if (j > 0 && buffer[--j] == L'\\') { - // Even number, so back up and keep counting - } else { - // Odd number, so it's escaped and we want to keep searching - continue; + // Note: this implements semantics that are only valid for argv0. + // Specifically, there is no escaping of quotes, and quotes within + // the argument have no effect. A quoted argv0 must start and end + // with a double quote character; otherwise, it ends at the first + // ' ' or '\t'. + int quoted = buffer[0] == L'"'; + for (int i = 1; bufferLength < 0 || i < bufferLength; ++i) { + switch (buffer[i]) { + case L'\0': + return i; + case L' ': + case L'\t': + if (!quoted) { + return i; } - } - - // Non-escaped quote with space after it - end of the argument! 
- if (i + 1 >= bufferLength || isspace(buffer[i + 1])) { - return i + 1; + break; + case L'"': + if (quoted) { + return i + 1; + } + break; } } - return bufferLength; } const wchar_t * -findArgumentEnd(const wchar_t *buffer, int bufferLength) +findArgv0End(const wchar_t *buffer, int bufferLength) { - return &buffer[findArgumentLength(buffer, bufferLength)]; + return &buffer[findArgv0Length(buffer, bufferLength)]; } @@ -562,11 +539,16 @@ parseCommandLine(SearchInfo *search) return RC_NO_COMMANDLINE; } - const wchar_t *tail = findArgumentEnd(search->originalCmdLine, -1); - const wchar_t *end = tail; - search->restOfCmdLine = tail; + const wchar_t *argv0End = findArgv0End(search->originalCmdLine, -1); + const wchar_t *tail = argv0End; // will be start of the executable name + const wchar_t *end = argv0End; // will be end of the executable name + search->restOfCmdLine = argv0End; // will be first space after argv0 while (--tail != search->originalCmdLine) { - if (*tail == L'.' && end == search->restOfCmdLine) { + if (*tail == L'"' && end == argv0End) { + // Move the "end" up to the quote, so we also allow moving for + // a period later on. + end = argv0End = tail; + } else if (*tail == L'.' && end == argv0End) { end = tail; } else if (*tail == L'\\' || *tail == L'/') { ++tail; diff --git a/PC/layout/support/options.py b/PC/layout/support/options.py index 3d93e892adae8a..26d13f5377ad59 100644 --- a/PC/layout/support/options.py +++ b/PC/layout/support/options.py @@ -18,7 +18,6 @@ def public(f): "stable": {"help": "stable ABI stub"}, "pip": {"help": "pip"}, "pip-user": {"help": "pip.ini file for default --user"}, - "distutils": {"help": "distutils"}, "tcltk": {"help": "Tcl, Tk and tkinter"}, "idle": {"help": "Idle"}, "tests": {"help": "test suite"}, @@ -43,7 +42,6 @@ def public(f): "stable", "pip", "pip-user", - "distutils", "tcltk", "idle", "venv", @@ -59,7 +57,6 @@ def public(f): "dev", "pip", "stable", - "distutils", "venv", "props", "nuspec", @@ -71,7 +68,6 @@ def public(f): "options": [ "stable", "pip", - "distutils", "tcltk", "idle", "tests", diff --git a/PC/layout/support/props.py b/PC/layout/support/props.py index 1eb9b7c06da513..c7a7a0ce777a6e 100644 --- a/PC/layout/support/props.py +++ b/PC/layout/support/props.py @@ -36,7 +36,6 @@ <PythonVersion>{PYTHON_VERSION}</PythonVersion> <IncludePythonExe Condition="$(IncludePythonExe) == ''">true</IncludePythonExe> - <IncludeDistutils Condition="$(IncludeDistutils) == ''">false</IncludeDistutils> <IncludeLib2To3 Condition="$(IncludeLib2To3) == ''">false</IncludeLib2To3> <IncludeVEnv Condition="$(IncludeVEnv) == ''">false</IncludeVEnv> @@ -68,7 +67,6 @@ <Link>DLLs\%(Filename)%(Extension)</Link> </_PythonRuntimeDlls> <_PythonRuntimeLib Include="$(PythonHome)\Lib\**\*" Exclude="$(PythonHome)\Lib\**\*.pyc;$(PythonHome)\Lib\site-packages\**\*" /> - <_PythonRuntimeLib Remove="$(PythonHome)\Lib\distutils\**\*" Condition="$(IncludeDistutils) != 'true'" /> <_PythonRuntimeLib Remove="$(PythonHome)\Lib\lib2to3\**\*" Condition="$(IncludeLib2To3) != 'true'" /> <_PythonRuntimeLib Remove="$(PythonHome)\Lib\ensurepip\**\*" Condition="$(IncludeVEnv) != 'true'" /> <_PythonRuntimeLib Remove="$(PythonHome)\Lib\venv\**\*" Condition="$(IncludeVEnv) != 'true'" /> diff --git a/PC/layout/support/python.props b/PC/layout/support/python.props index 4cc70083ebe693..e46891aafcb9fc 100644 --- a/PC/layout/support/python.props +++ b/PC/layout/support/python.props @@ -6,9 +6,8 @@ <PythonLibs>$(PythonHome)\libs</PythonLibs> <PythonTag>$$PYTHON_TAG$$</PythonTag> 
<PythonVersion>$$PYTHON_VERSION$$</PythonVersion> - + <IncludePythonExe Condition="$(IncludePythonExe) == ''">true</IncludePythonExe> - <IncludeDistutils Condition="$(IncludeDistutils) == ''">false</IncludeDistutils> <IncludeLib2To3 Condition="$(IncludeLib2To3) == ''">false</IncludeLib2To3> <IncludeVEnv Condition="$(IncludeVEnv) == ''">false</IncludeVEnv> @@ -41,7 +40,6 @@ <Link>DLLs\%(Filename)%(Extension)</Link> </_PythonRuntimeDlls> <_PythonRuntimeLib Include="$(PythonHome)\Lib\**\*" Exclude="$(PythonHome)\Lib\**\*.pyc;$(PythonHome)\Lib\site-packages\**\*" /> - <_PythonRuntimeLib Remove="$(PythonHome)\Lib\distutils\**\*" Condition="$(IncludeDistutils) != 'true'" /> <_PythonRuntimeLib Remove="$(PythonHome)\Lib\lib2to3\**\*" Condition="$(IncludeLib2To3) != 'true'" /> <_PythonRuntimeLib Remove="$(PythonHome)\Lib\ensurepip\**\*" Condition="$(IncludeVEnv) != 'true'" /> <_PythonRuntimeLib Remove="$(PythonHome)\Lib\venv\**\*" Condition="$(IncludeVEnv) != 'true'" /> @@ -50,7 +48,7 @@ </_PythonRuntimeLib> <PythonRuntime Include="@(_PythonRuntimeExe);@(_PythonRuntimeDlls);@(_PythonRuntimeLib)" /> </ItemGroup> - + <Message Importance="low" Text="Collected Python runtime from $(PythonHome):%0D%0A@(PythonRuntime->' %(Link)','%0D%0A')" /> </Target> </Project> diff --git a/PC/pyconfig.h b/PC/pyconfig.h index 1a33d4c5a1e4fc..1d8408b363a66a 100644 --- a/PC/pyconfig.h +++ b/PC/pyconfig.h @@ -209,6 +209,16 @@ typedef int pid_t; #endif /* _MSC_VER */ +/* ------------------------------------------------------------------------*/ +/* mingw and mingw-w64 define __MINGW32__ */ +#ifdef __MINGW32__ + +#ifdef _WIN64 +#define MS_WIN64 +#endif + +#endif /* __MINGW32__*/ + /* ------------------------------------------------------------------------*/ /* egcs/gnu-win32 defines __GNUC__ and _WIN32 */ #if defined(__GNUC__) && defined(_WIN32) diff --git a/PC/python_uwp.cpp b/PC/python_uwp.cpp index 88369e8fbfeb38..2beea60e5af1ef 100644 --- a/PC/python_uwp.cpp +++ b/PC/python_uwp.cpp @@ -10,6 +10,7 @@ #include <string> +#include <appmodel.h> #include <winrt\Windows.ApplicationModel.h> #include <winrt\Windows.Storage.h> @@ -28,37 +29,49 @@ const wchar_t *PROGNAME = L"python.exe"; #endif static std::wstring -get_user_base() +get_package_family() { try { - const auto appData = winrt::Windows::Storage::ApplicationData::Current(); - if (appData) { - const auto localCache = appData.LocalCacheFolder(); - if (localCache) { - auto path = localCache.Path(); - if (!path.empty()) { - return std::wstring(path) + L"\\local-packages"; - } - } + UINT32 nameLength = MAX_PATH; + std::wstring name; + name.resize(nameLength); + DWORD rc = GetCurrentPackageFamilyName(&nameLength, name.data()); + if (rc == ERROR_SUCCESS) { + name.resize(nameLength - 1); + return name; } - } catch (...) { + else if (rc != ERROR_INSUFFICIENT_BUFFER) { + throw rc; + } + name.resize(nameLength); + rc = GetCurrentPackageFamilyName(&nameLength, name.data()); + if (rc != ERROR_SUCCESS) { + throw rc; + } + name.resize(nameLength - 1); + return name; } + catch (...) 
{ + } + return std::wstring(); } static std::wstring -get_package_family() +get_user_base() { try { - const auto package = winrt::Windows::ApplicationModel::Package::Current(); - if (package) { - const auto id = package.Id(); - if (id) { - return std::wstring(id.FamilyName()); + const auto appData = winrt::Windows::Storage::ApplicationData::Current(); + if (appData) { + const auto localCache = appData.LocalCacheFolder(); + if (localCache) { + std::wstring path { localCache.Path().c_str() }; + if (!path.empty()) { + return path + L"\\local-packages"; + } } } - } - catch (...) { + } catch (...) { } return std::wstring(); @@ -68,13 +81,24 @@ static std::wstring get_package_home() { try { - const auto package = winrt::Windows::ApplicationModel::Package::Current(); - if (package) { - const auto path = package.InstalledLocation(); - if (path) { - return std::wstring(path.Path()); - } + UINT32 pathLength = MAX_PATH; + std::wstring path; + path.resize(pathLength); + DWORD rc = GetCurrentPackagePath(&pathLength, path.data()); + if (rc == ERROR_SUCCESS) { + path.resize(pathLength - 1); + return path; + } + else if (rc != ERROR_INSUFFICIENT_BUFFER) { + throw rc; + } + path.resize(pathLength); + rc = GetCurrentPackagePath(&pathLength, path.data()); + if (rc != ERROR_SUCCESS) { + throw rc; } + path.resize(pathLength - 1); + return path; } catch (...) { } diff --git a/PC/winreg.c b/PC/winreg.c index 6ae0d8169cc56b..63b37be526ab80 100644 --- a/PC/winreg.c +++ b/PC/winreg.c @@ -308,8 +308,7 @@ static PyHKEYObject * winreg_HKEYType___enter___impl(PyHKEYObject *self) /*[clinic end generated code: output=52c34986dab28990 input=c40fab1f0690a8e2]*/ { - Py_XINCREF(self); - return self; + return (PyHKEYObject*)Py_XNewRef(self); } @@ -562,42 +561,54 @@ Py2Reg(PyObject *value, DWORD typ, BYTE **retDataBuf, DWORD *retDataSize) { Py_ssize_t i,j; switch (typ) { - case REG_DWORD: - if (value != Py_None && !PyLong_Check(value)) - return FALSE; - *retDataBuf = (BYTE *)PyMem_NEW(DWORD, 1); - if (*retDataBuf == NULL){ - PyErr_NoMemory(); - return FALSE; - } - *retDataSize = sizeof(DWORD); - if (value == Py_None) { - DWORD zero = 0; - memcpy(*retDataBuf, &zero, sizeof(DWORD)); - } - else { - DWORD d = PyLong_AsUnsignedLong(value); + case REG_DWORD: + { + if (value != Py_None && !PyLong_Check(value)) { + return FALSE; + } + DWORD d; + if (value == Py_None) { + d = 0; + } + else if (PyLong_Check(value)) { + d = PyLong_AsUnsignedLong(value); + if (d == (DWORD)(-1) && PyErr_Occurred()) { + return FALSE; + } + } + *retDataBuf = (BYTE *)PyMem_NEW(DWORD, 1); + if (*retDataBuf == NULL) { + PyErr_NoMemory(); + return FALSE; + } memcpy(*retDataBuf, &d, sizeof(DWORD)); + *retDataSize = sizeof(DWORD); + break; } - break; - case REG_QWORD: - if (value != Py_None && !PyLong_Check(value)) - return FALSE; - *retDataBuf = (BYTE *)PyMem_NEW(DWORD64, 1); - if (*retDataBuf == NULL){ - PyErr_NoMemory(); - return FALSE; - } - *retDataSize = sizeof(DWORD64); - if (value == Py_None) { - DWORD64 zero = 0; - memcpy(*retDataBuf, &zero, sizeof(DWORD64)); - } - else { - DWORD64 d = PyLong_AsUnsignedLongLong(value); + case REG_QWORD: + { + if (value != Py_None && !PyLong_Check(value)) { + return FALSE; + } + DWORD64 d; + if (value == Py_None) { + d = 0; + } + else if (PyLong_Check(value)) { + d = PyLong_AsUnsignedLongLong(value); + if (d == (DWORD64)(-1) && PyErr_Occurred()) { + return FALSE; + } + } + *retDataBuf = (BYTE *)PyMem_NEW(DWORD64, 1); + if (*retDataBuf == NULL) { + PyErr_NoMemory(); + return FALSE; + } memcpy(*retDataBuf, &d, 
sizeof(DWORD64)); + *retDataSize = sizeof(DWORD64); + break; } - break; case REG_SZ: case REG_EXPAND_SZ: { @@ -784,8 +795,7 @@ Reg2Py(BYTE *retDataBuf, DWORD retDataSize, DWORD typ) support it natively, we should handle the bits. */ default: if (retDataSize == 0) { - Py_INCREF(Py_None); - obData = Py_None; + obData = Py_NewRef(Py_None); } else obData = PyBytes_FromStringAndSize( diff --git a/PCbuild/_freeze_module.vcxproj b/PCbuild/_freeze_module.vcxproj index 8454bd67b1db1b..fce1f670510001 100644 --- a/PCbuild/_freeze_module.vcxproj +++ b/PCbuild/_freeze_module.vcxproj @@ -110,6 +110,7 @@ <ClCompile Include="..\Modules\getpath_noop.c" /> <ClCompile Include="..\Modules\posixmodule.c" /> <ClCompile Include="..\Modules\signalmodule.c" /> + <ClCompile Include="..\Modules\timemodule.c" /> <ClCompile Include="..\Modules\_tracemalloc.c" /> <ClCompile Include="..\Modules\_io\_iomodule.c" /> <ClCompile Include="..\Modules\_io\bufferedio.c" /> diff --git a/PCbuild/_freeze_module.vcxproj.filters b/PCbuild/_freeze_module.vcxproj.filters index 6e8498dceb1cfa..dce6278987c5df 100644 --- a/PCbuild/_freeze_module.vcxproj.filters +++ b/PCbuild/_freeze_module.vcxproj.filters @@ -367,6 +367,9 @@ <ClCompile Include="..\Python\thread.c"> <Filter>Source Files</Filter> </ClCompile> + <ClCompile Include="..\Modules\timemodule.c"> + <Filter>Source Files</Filter> + </ClCompile> <ClCompile Include="..\Parser\token.c"> <Filter>Source Files</Filter> </ClCompile> diff --git a/PCbuild/_testcapi.vcxproj b/PCbuild/_testcapi.vcxproj index b7d40c8cdc30eb..58bf4e1eacbf21 100644 --- a/PCbuild/_testcapi.vcxproj +++ b/PCbuild/_testcapi.vcxproj @@ -94,10 +94,19 @@ </PropertyGroup> <ItemGroup> <ClCompile Include="..\Modules\_testcapimodule.c" /> + <ClCompile Include="..\Modules\_testcapi\getargs.c" /> <ClCompile Include="..\Modules\_testcapi\vectorcall.c" /> <ClCompile Include="..\Modules\_testcapi\vectorcall_limited.c" /> <ClCompile Include="..\Modules\_testcapi\heaptype.c" /> <ClCompile Include="..\Modules\_testcapi\unicode.c" /> + <ClCompile Include="..\Modules\_testcapi\pytime.c" /> + <ClCompile Include="..\Modules\_testcapi\datetime.c" /> + <ClCompile Include="..\Modules\_testcapi\docstring.c" /> + <ClCompile Include="..\Modules\_testcapi\mem.c" /> + <ClCompile Include="..\Modules\_testcapi\watchers.c" /> + <ClCompile Include="..\Modules\_testcapi\float.c" /> + <ClCompile Include="..\Modules\_testcapi\long.c" /> + <ClCompile Include="..\Modules\_testcapi\structmember.c" /> </ItemGroup> <ItemGroup> <ResourceCompile Include="..\PC\python_nt.rc" /> diff --git a/PCbuild/_testcapi.vcxproj.filters b/PCbuild/_testcapi.vcxproj.filters index fc2c4345fe142e..101c5322761634 100644 --- a/PCbuild/_testcapi.vcxproj.filters +++ b/PCbuild/_testcapi.vcxproj.filters @@ -12,6 +12,9 @@ <ClCompile Include="..\Modules\_testcapimodule.c"> <Filter>Source Files</Filter> </ClCompile> + <ClCompile Include="..\Modules\_testcapi\getargs.c"> + <Filter>Source Files</Filter> + </ClCompile> <ClCompile Include="..\Modules\_testcapi\vectorcall.c"> <Filter>Source Files</Filter> </ClCompile> @@ -24,6 +27,30 @@ <ClCompile Include="..\Modules\_testcapi\unicode.c"> <Filter>Source Files</Filter> </ClCompile> + <ClCompile Include="..\Modules\_testcapi\pytime.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="..\Modules\_testcapi\datetime.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="..\Modules\_testcapi\docstring.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="..\Modules\_testcapi\mem.c"> + 
<Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="..\Modules\_testcapi\watchers.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="..\Modules\_testcapi\float.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="..\Modules\_testcapi\long.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="..\Modules\_testcapi\structmember.c"> + <Filter>Source Files</Filter> + </ClCompile> </ItemGroup> <ItemGroup> <ResourceCompile Include="..\PC\python_nt.rc"> diff --git a/PCbuild/_testsinglephase.vcxproj b/PCbuild/_testsinglephase.vcxproj new file mode 100644 index 00000000000000..fb4bcd953923f8 --- /dev/null +++ b/PCbuild/_testsinglephase.vcxproj @@ -0,0 +1,115 @@ +<?xml version="1.0" encoding="utf-8"?> +<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> + <ItemGroup Label="ProjectConfigurations"> + <ProjectConfiguration Include="Debug|ARM"> + <Configuration>Debug</Configuration> + <Platform>ARM</Platform> + </ProjectConfiguration> + <ProjectConfiguration Include="Debug|ARM64"> + <Configuration>Debug</Configuration> + <Platform>ARM64</Platform> + </ProjectConfiguration> + <ProjectConfiguration Include="Debug|Win32"> + <Configuration>Debug</Configuration> + <Platform>Win32</Platform> + </ProjectConfiguration> + <ProjectConfiguration Include="Debug|x64"> + <Configuration>Debug</Configuration> + <Platform>x64</Platform> + </ProjectConfiguration> + <ProjectConfiguration Include="PGInstrument|ARM"> + <Configuration>PGInstrument</Configuration> + <Platform>ARM</Platform> + </ProjectConfiguration> + <ProjectConfiguration Include="PGInstrument|ARM64"> + <Configuration>PGInstrument</Configuration> + <Platform>ARM64</Platform> + </ProjectConfiguration> + <ProjectConfiguration Include="PGInstrument|Win32"> + <Configuration>PGInstrument</Configuration> + <Platform>Win32</Platform> + </ProjectConfiguration> + <ProjectConfiguration Include="PGInstrument|x64"> + <Configuration>PGInstrument</Configuration> + <Platform>x64</Platform> + </ProjectConfiguration> + <ProjectConfiguration Include="PGUpdate|ARM"> + <Configuration>PGUpdate</Configuration> + <Platform>ARM</Platform> + </ProjectConfiguration> + <ProjectConfiguration Include="PGUpdate|ARM64"> + <Configuration>PGUpdate</Configuration> + <Platform>ARM64</Platform> + </ProjectConfiguration> + <ProjectConfiguration Include="PGUpdate|Win32"> + <Configuration>PGUpdate</Configuration> + <Platform>Win32</Platform> + </ProjectConfiguration> + <ProjectConfiguration Include="PGUpdate|x64"> + <Configuration>PGUpdate</Configuration> + <Platform>x64</Platform> + </ProjectConfiguration> + <ProjectConfiguration Include="Release|ARM"> + <Configuration>Release</Configuration> + <Platform>ARM</Platform> + </ProjectConfiguration> + <ProjectConfiguration Include="Release|ARM64"> + <Configuration>Release</Configuration> + <Platform>ARM64</Platform> + </ProjectConfiguration> + <ProjectConfiguration Include="Release|Win32"> + <Configuration>Release</Configuration> + <Platform>Win32</Platform> + </ProjectConfiguration> + <ProjectConfiguration Include="Release|x64"> + <Configuration>Release</Configuration> + <Platform>x64</Platform> + </ProjectConfiguration> + </ItemGroup> + <PropertyGroup Label="Globals"> + <ProjectGuid>{2097F1C1-597C-4167-93E3-656A7D6339B2}</ProjectGuid> + <Keyword>Win32Proj</Keyword> + <RootNamespace>_testsinglephase</RootNamespace> + <SupportPGO>false</SupportPGO> + </PropertyGroup> + <Import Project="python.props" /> + <Import 
Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" /> + <PropertyGroup Label="Configuration"> + <ConfigurationType>DynamicLibrary</ConfigurationType> + <CharacterSet>NotSet</CharacterSet> + </PropertyGroup> + <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" /> + <PropertyGroup> + <TargetExt>.pyd</TargetExt> + </PropertyGroup> + <ImportGroup Label="ExtensionSettings"> + </ImportGroup> + <ImportGroup Label="PropertySheets"> + <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> + <Import Project="pyproject.props" /> + </ImportGroup> + <PropertyGroup Label="UserMacros" /> + <ItemDefinitionGroup> + <ClCompile> + <PreprocessorDefinitions>_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions> + </ClCompile> + <Link> + <SubSystem>Console</SubSystem> + </Link> + </ItemDefinitionGroup> + <ItemGroup> + <ClCompile Include="..\Modules\_testsinglephase.c" /> + </ItemGroup> + <ItemGroup> + <ResourceCompile Include="..\PC\python_nt.rc" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="pythoncore.vcxproj"> + <Project>{cf7ac3d1-e2df-41d2-bea6-1e2556cdea26}</Project> + <ReferenceOutputAssembly>false</ReferenceOutputAssembly> + </ProjectReference> + </ItemGroup> + <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" /> + <ImportGroup Label="ExtensionTargets"> + </ImportGroup> +</Project> diff --git a/PCbuild/_testsinglephase.vcxproj.filters b/PCbuild/_testsinglephase.vcxproj.filters new file mode 100644 index 00000000000000..2a0e0ef66c116f --- /dev/null +++ b/PCbuild/_testsinglephase.vcxproj.filters @@ -0,0 +1,23 @@ +<?xml version="1.0" encoding="utf-8"?> +<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> + <ItemGroup> + <Filter Include="Source Files"> + <UniqueIdentifier>{4FC737F1-C7A5-4376-A066-2A32D752A2FF}</UniqueIdentifier> + <Extensions>cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx</Extensions> + </Filter> + <Filter Include="Resource Files"> + <UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier> + <Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms</Extensions> + </Filter> + </ItemGroup> + <ItemGroup> + <ClCompile Include="..\Modules\_testsinglephase.c"> + <Filter>Source Files</Filter> + </ClCompile> + </ItemGroup> + <ItemGroup> + <ResourceCompile Include="..\PC\python_nt.rc"> + <Filter>Resource Files</Filter> + </ResourceCompile> + </ItemGroup> +</Project> diff --git a/PCbuild/get_externals.bat b/PCbuild/get_externals.bat index 681c79f85d0c94..98cca979fdfcd0 100644 --- a/PCbuild/get_externals.bat +++ b/PCbuild/get_externals.bat @@ -59,7 +59,7 @@ if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tcl-core-8.6.12. 
if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tk-8.6.12.1 if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tix-8.4.3.6 set libraries=%libraries% xz-5.2.5 -set libraries=%libraries% zlib-1.2.12 +set libraries=%libraries% zlib-1.2.13 for %%e in (%libraries%) do ( if exist "%EXTERNALS_DIR%\%%e" ( diff --git a/PCbuild/lib.pyproj b/PCbuild/lib.pyproj deleted file mode 100644 index daa2021191027d..00000000000000 --- a/PCbuild/lib.pyproj +++ /dev/null @@ -1,1817 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" DefaultTargets="Build"> - <PropertyGroup> - <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration> - <SchemaVersion>2.0</SchemaVersion> - <ProjectGuid>{cb12a4c2-3757-4e67-8f51-c533876cefd1}</ProjectGuid> - <ProjectHome>..\Lib\</ProjectHome> - <StartupFile>abc.py</StartupFile> - <SearchPath /> - <WorkingDirectory>.</WorkingDirectory> - <OutputPath>.</OutputPath> - <ProjectTypeGuids>{888888a0-9f3d-457c-b088-3a5042f75d52}</ProjectTypeGuids> - <LaunchProvider>Standard Python launcher</LaunchProvider> - <InterpreterId /> - </PropertyGroup> - <PropertyGroup Condition="'$(Configuration)' == 'Debug'" /> - <PropertyGroup Condition="'$(Configuration)' == 'Release'" /> - <PropertyGroup> - <VisualStudioVersion Condition=" '$(VisualStudioVersion)' == '' ">10.0</VisualStudioVersion> - </PropertyGroup> - <ItemGroup> - <Compile Include="abc.py" /> - <Compile Include="aifc.py" /> - <Compile Include="antigravity.py" /> - <Compile Include="argparse.py" /> - <Compile Include="ast.py" /> - <Compile Include="asynchat.py" /> - <Compile Include="asyncio\base_events.py" /> - <Compile Include="asyncio\base_futures.py" /> - <Compile Include="asyncio\base_subprocess.py" /> - <Compile Include="asyncio\base_tasks.py" /> - <Compile Include="asyncio\compat.py" /> - <Compile Include="asyncio\constants.py" /> - <Compile Include="asyncio\coroutines.py" /> - <Compile Include="asyncio\events.py" /> - <Compile Include="asyncio\futures.py" /> - <Compile Include="asyncio\locks.py" /> - <Compile Include="asyncio\log.py" /> - <Compile Include="asyncio\proactor_events.py" /> - <Compile Include="asyncio\protocols.py" /> - <Compile Include="asyncio\queues.py" /> - <Compile Include="asyncio\selector_events.py" /> - <Compile Include="asyncio\sslproto.py" /> - <Compile Include="asyncio\streams.py" /> - <Compile Include="asyncio\subprocess.py" /> - <Compile Include="asyncio\tasks.py" /> - <Compile Include="asyncio\test_utils.py" /> - <Compile Include="asyncio\transports.py" /> - <Compile Include="asyncio\unix_events.py" /> - <Compile Include="asyncio\windows_events.py" /> - <Compile Include="asyncio\windows_utils.py" /> - <Compile Include="asyncio\__init__.py" /> - <Compile Include="asyncore.py" /> - <Compile Include="base64.py" /> - <Compile Include="bdb.py" /> - <Compile Include="bisect.py" /> - <Compile Include="bz2.py" /> - <Compile Include="calendar.py" /> - <Compile Include="cgi.py" /> - <Compile Include="cgitb.py" /> - <Compile Include="chunk.py" /> - <Compile Include="cmd.py" /> - <Compile Include="code.py" /> - <Compile Include="codecs.py" /> - <Compile Include="codeop.py" /> - <Compile Include="collections\abc.py" /> - <Compile Include="collections\__init__.py" /> - <Compile Include="colorsys.py" /> - <Compile Include="compileall.py" /> - <Compile Include="concurrent\futures\process.py" /> - <Compile Include="concurrent\futures\thread.py" /> - <Compile Include="concurrent\futures\_base.py" 
/> - <Compile Include="concurrent\futures\__init__.py" /> - <Compile Include="concurrent\__init__.py" /> - <Compile Include="configparser.py" /> - <Compile Include="contextlib.py" /> - <Compile Include="copy.py" /> - <Compile Include="copyreg.py" /> - <Compile Include="cProfile.py" /> - <Compile Include="crypt.py" /> - <Compile Include="csv.py" /> - <Compile Include="ctypes\macholib\dyld.py" /> - <Compile Include="ctypes\macholib\dylib.py" /> - <Compile Include="ctypes\macholib\framework.py" /> - <Compile Include="ctypes\macholib\__init__.py" /> - <Compile Include="ctypes\util.py" /> - <Compile Include="ctypes\wintypes.py" /> - <Compile Include="ctypes\_endian.py" /> - <Compile Include="ctypes\__init__.py" /> - <Compile Include="curses\ascii.py" /> - <Compile Include="curses\has_key.py" /> - <Compile Include="curses\panel.py" /> - <Compile Include="curses\textpad.py" /> - <Compile Include="curses\__init__.py" /> - <Compile Include="datetime.py" /> - <Compile Include="dbm\dumb.py" /> - <Compile Include="dbm\gnu.py" /> - <Compile Include="dbm\ndbm.py" /> - <Compile Include="dbm\__init__.py" /> - <Compile Include="decimal.py" /> - <Compile Include="difflib.py" /> - <Compile Include="dis.py" /> - <Compile Include="distutils\archive_util.py" /> - <Compile Include="distutils\bcppcompiler.py" /> - <Compile Include="distutils\ccompiler.py" /> - <Compile Include="distutils\cmd.py" /> - <Compile Include="distutils\command\bdist.py" /> - <Compile Include="distutils\command\bdist_dumb.py" /> - <Compile Include="distutils\command\bdist_rpm.py" /> - <Compile Include="distutils\command\build.py" /> - <Compile Include="distutils\command\build_clib.py" /> - <Compile Include="distutils\command\build_ext.py" /> - <Compile Include="distutils\command\build_py.py" /> - <Compile Include="distutils\command\build_scripts.py" /> - <Compile Include="distutils\command\check.py" /> - <Compile Include="distutils\command\clean.py" /> - <Compile Include="distutils\command\config.py" /> - <Compile Include="distutils\command\install.py" /> - <Compile Include="distutils\command\install_data.py" /> - <Compile Include="distutils\command\install_egg_info.py" /> - <Compile Include="distutils\command\install_headers.py" /> - <Compile Include="distutils\command\install_lib.py" /> - <Compile Include="distutils\command\install_scripts.py" /> - <Compile Include="distutils\command\register.py" /> - <Compile Include="distutils\command\sdist.py" /> - <Compile Include="distutils\command\upload.py" /> - <Compile Include="distutils\command\__init__.py" /> - <Compile Include="distutils\config.py" /> - <Compile Include="distutils\core.py" /> - <Compile Include="distutils\cygwinccompiler.py" /> - <Compile Include="distutils\debug.py" /> - <Compile Include="distutils\dep_util.py" /> - <Compile Include="distutils\dir_util.py" /> - <Compile Include="distutils\dist.py" /> - <Compile Include="distutils\errors.py" /> - <Compile Include="distutils\extension.py" /> - <Compile Include="distutils\fancy_getopt.py" /> - <Compile Include="distutils\filelist.py" /> - <Compile Include="distutils\file_util.py" /> - <Compile Include="distutils\log.py" /> - <Compile Include="distutils\msvc9compiler.py" /> - <Compile Include="distutils\msvccompiler.py" /> - <Compile Include="distutils\spawn.py" /> - <Compile Include="distutils\sysconfig.py" /> - <Compile Include="distutils\tests\support.py" /> - <Compile Include="distutils\tests\test_archive_util.py" /> - <Compile Include="distutils\tests\test_bdist.py" /> - <Compile 
Include="distutils\tests\test_bdist_dumb.py" /> - <Compile Include="distutils\tests\test_bdist_rpm.py" /> - <Compile Include="distutils\tests\test_build.py" /> - <Compile Include="distutils\tests\test_build_clib.py" /> - <Compile Include="distutils\tests\test_build_ext.py" /> - <Compile Include="distutils\tests\test_build_py.py" /> - <Compile Include="distutils\tests\test_build_scripts.py" /> - <Compile Include="distutils\tests\test_check.py" /> - <Compile Include="distutils\tests\test_clean.py" /> - <Compile Include="distutils\tests\test_cmd.py" /> - <Compile Include="distutils\tests\test_config.py" /> - <Compile Include="distutils\tests\test_config_cmd.py" /> - <Compile Include="distutils\tests\test_core.py" /> - <Compile Include="distutils\tests\test_cygwinccompiler.py" /> - <Compile Include="distutils\tests\test_dep_util.py" /> - <Compile Include="distutils\tests\test_dir_util.py" /> - <Compile Include="distutils\tests\test_dist.py" /> - <Compile Include="distutils\tests\test_extension.py" /> - <Compile Include="distutils\tests\test_filelist.py" /> - <Compile Include="distutils\tests\test_file_util.py" /> - <Compile Include="distutils\tests\test_install.py" /> - <Compile Include="distutils\tests\test_install_data.py" /> - <Compile Include="distutils\tests\test_install_headers.py" /> - <Compile Include="distutils\tests\test_install_lib.py" /> - <Compile Include="distutils\tests\test_install_scripts.py" /> - <Compile Include="distutils\tests\test_log.py" /> - <Compile Include="distutils\tests\test_msvc9compiler.py" /> - <Compile Include="distutils\tests\test_msvccompiler.py" /> - <Compile Include="distutils\tests\test_register.py" /> - <Compile Include="distutils\tests\test_sdist.py" /> - <Compile Include="distutils\tests\test_spawn.py" /> - <Compile Include="distutils\tests\test_sysconfig.py" /> - <Compile Include="distutils\tests\test_text_file.py" /> - <Compile Include="distutils\tests\test_unixccompiler.py" /> - <Compile Include="distutils\tests\test_upload.py" /> - <Compile Include="distutils\tests\test_util.py" /> - <Compile Include="distutils\tests\test_version.py" /> - <Compile Include="distutils\tests\test_versionpredicate.py" /> - <Compile Include="distutils\tests\__init__.py" /> - <Compile Include="distutils\text_file.py" /> - <Compile Include="distutils\unixccompiler.py" /> - <Compile Include="distutils\util.py" /> - <Compile Include="distutils\version.py" /> - <Compile Include="distutils\versionpredicate.py" /> - <Compile Include="distutils\_msvccompiler.py" /> - <Compile Include="distutils\__init__.py" /> - <Compile Include="doctest.py" /> - <Compile Include="email\base64mime.py" /> - <Compile Include="email\charset.py" /> - <Compile Include="email\contentmanager.py" /> - <Compile Include="email\encoders.py" /> - <Compile Include="email\errors.py" /> - <Compile Include="email\feedparser.py" /> - <Compile Include="email\generator.py" /> - <Compile Include="email\header.py" /> - <Compile Include="email\headerregistry.py" /> - <Compile Include="email\iterators.py" /> - <Compile Include="email\message.py" /> - <Compile Include="email\mime\application.py" /> - <Compile Include="email\mime\audio.py" /> - <Compile Include="email\mime\base.py" /> - <Compile Include="email\mime\image.py" /> - <Compile Include="email\mime\message.py" /> - <Compile Include="email\mime\multipart.py" /> - <Compile Include="email\mime\nonmultipart.py" /> - <Compile Include="email\mime\text.py" /> - <Compile Include="email\mime\__init__.py" /> - <Compile Include="email\parser.py" /> - <Compile 
Include="email\policy.py" /> - <Compile Include="email\quoprimime.py" /> - <Compile Include="email\utils.py" /> - <Compile Include="email\_encoded_words.py" /> - <Compile Include="email\_header_value_parser.py" /> - <Compile Include="email\_parseaddr.py" /> - <Compile Include="email\_policybase.py" /> - <Compile Include="email\__init__.py" /> - <Compile Include="encodings\aliases.py" /> - <Compile Include="encodings\ascii.py" /> - <Compile Include="encodings\base64_codec.py" /> - <Compile Include="encodings\big5.py" /> - <Compile Include="encodings\big5hkscs.py" /> - <Compile Include="encodings\bz2_codec.py" /> - <Compile Include="encodings\charmap.py" /> - <Compile Include="encodings\cp037.py" /> - <Compile Include="encodings\cp1006.py" /> - <Compile Include="encodings\cp1026.py" /> - <Compile Include="encodings\cp1125.py" /> - <Compile Include="encodings\cp1140.py" /> - <Compile Include="encodings\cp1250.py" /> - <Compile Include="encodings\cp1251.py" /> - <Compile Include="encodings\cp1252.py" /> - <Compile Include="encodings\cp1253.py" /> - <Compile Include="encodings\cp1254.py" /> - <Compile Include="encodings\cp1255.py" /> - <Compile Include="encodings\cp1256.py" /> - <Compile Include="encodings\cp1257.py" /> - <Compile Include="encodings\cp1258.py" /> - <Compile Include="encodings\cp273.py" /> - <Compile Include="encodings\cp424.py" /> - <Compile Include="encodings\cp437.py" /> - <Compile Include="encodings\cp500.py" /> - <Compile Include="encodings\cp65001.py" /> - <Compile Include="encodings\cp720.py" /> - <Compile Include="encodings\cp737.py" /> - <Compile Include="encodings\cp775.py" /> - <Compile Include="encodings\cp850.py" /> - <Compile Include="encodings\cp852.py" /> - <Compile Include="encodings\cp855.py" /> - <Compile Include="encodings\cp856.py" /> - <Compile Include="encodings\cp857.py" /> - <Compile Include="encodings\cp858.py" /> - <Compile Include="encodings\cp860.py" /> - <Compile Include="encodings\cp861.py" /> - <Compile Include="encodings\cp862.py" /> - <Compile Include="encodings\cp863.py" /> - <Compile Include="encodings\cp864.py" /> - <Compile Include="encodings\cp865.py" /> - <Compile Include="encodings\cp866.py" /> - <Compile Include="encodings\cp869.py" /> - <Compile Include="encodings\cp874.py" /> - <Compile Include="encodings\cp875.py" /> - <Compile Include="encodings\cp932.py" /> - <Compile Include="encodings\cp949.py" /> - <Compile Include="encodings\cp950.py" /> - <Compile Include="encodings\euc_jisx0213.py" /> - <Compile Include="encodings\euc_jis_2004.py" /> - <Compile Include="encodings\euc_jp.py" /> - <Compile Include="encodings\euc_kr.py" /> - <Compile Include="encodings\gb18030.py" /> - <Compile Include="encodings\gb2312.py" /> - <Compile Include="encodings\gbk.py" /> - <Compile Include="encodings\hex_codec.py" /> - <Compile Include="encodings\hp_roman8.py" /> - <Compile Include="encodings\hz.py" /> - <Compile Include="encodings\idna.py" /> - <Compile Include="encodings\iso2022_jp.py" /> - <Compile Include="encodings\iso2022_jp_1.py" /> - <Compile Include="encodings\iso2022_jp_2.py" /> - <Compile Include="encodings\iso2022_jp_2004.py" /> - <Compile Include="encodings\iso2022_jp_3.py" /> - <Compile Include="encodings\iso2022_jp_ext.py" /> - <Compile Include="encodings\iso2022_kr.py" /> - <Compile Include="encodings\iso8859_1.py" /> - <Compile Include="encodings\iso8859_10.py" /> - <Compile Include="encodings\iso8859_11.py" /> - <Compile Include="encodings\iso8859_13.py" /> - <Compile Include="encodings\iso8859_14.py" /> - <Compile 
Include="encodings\iso8859_15.py" /> - <Compile Include="encodings\iso8859_16.py" /> - <Compile Include="encodings\iso8859_2.py" /> - <Compile Include="encodings\iso8859_3.py" /> - <Compile Include="encodings\iso8859_4.py" /> - <Compile Include="encodings\iso8859_5.py" /> - <Compile Include="encodings\iso8859_6.py" /> - <Compile Include="encodings\iso8859_7.py" /> - <Compile Include="encodings\iso8859_8.py" /> - <Compile Include="encodings\iso8859_9.py" /> - <Compile Include="encodings\johab.py" /> - <Compile Include="encodings\koi8_r.py" /> - <Compile Include="encodings\koi8_t.py" /> - <Compile Include="encodings\koi8_u.py" /> - <Compile Include="encodings\kz1048.py" /> - <Compile Include="encodings\latin_1.py" /> - <Compile Include="encodings\mac_arabic.py" /> - <Compile Include="encodings\mac_centeuro.py" /> - <Compile Include="encodings\mac_croatian.py" /> - <Compile Include="encodings\mac_cyrillic.py" /> - <Compile Include="encodings\mac_farsi.py" /> - <Compile Include="encodings\mac_greek.py" /> - <Compile Include="encodings\mac_iceland.py" /> - <Compile Include="encodings\mac_latin2.py" /> - <Compile Include="encodings\mac_roman.py" /> - <Compile Include="encodings\mac_romanian.py" /> - <Compile Include="encodings\mac_turkish.py" /> - <Compile Include="encodings\mbcs.py" /> - <Compile Include="encodings\oem.py" /> - <Compile Include="encodings\palmos.py" /> - <Compile Include="encodings\ptcp154.py" /> - <Compile Include="encodings\punycode.py" /> - <Compile Include="encodings\quopri_codec.py" /> - <Compile Include="encodings\raw_unicode_escape.py" /> - <Compile Include="encodings\rot_13.py" /> - <Compile Include="encodings\shift_jis.py" /> - <Compile Include="encodings\shift_jisx0213.py" /> - <Compile Include="encodings\shift_jis_2004.py" /> - <Compile Include="encodings\tis_620.py" /> - <Compile Include="encodings\undefined.py" /> - <Compile Include="encodings\unicode_escape.py" /> - <Compile Include="encodings\utf_16.py" /> - <Compile Include="encodings\utf_16_be.py" /> - <Compile Include="encodings\utf_16_le.py" /> - <Compile Include="encodings\utf_32.py" /> - <Compile Include="encodings\utf_32_be.py" /> - <Compile Include="encodings\utf_32_le.py" /> - <Compile Include="encodings\utf_7.py" /> - <Compile Include="encodings\utf_8.py" /> - <Compile Include="encodings\utf_8_sig.py" /> - <Compile Include="encodings\uu_codec.py" /> - <Compile Include="encodings\zlib_codec.py" /> - <Compile Include="encodings\__init__.py" /> - <Compile Include="ensurepip\_uninstall.py" /> - <Compile Include="ensurepip\__init__.py" /> - <Compile Include="ensurepip\__main__.py" /> - <Compile Include="enum.py" /> - <Compile Include="filecmp.py" /> - <Compile Include="fileinput.py" /> - <Compile Include="fnmatch.py" /> - <Compile Include="formatter.py" /> - <Compile Include="fractions.py" /> - <Compile Include="ftplib.py" /> - <Compile Include="functools.py" /> - <Compile Include="genericpath.py" /> - <Compile Include="getopt.py" /> - <Compile Include="getpass.py" /> - <Compile Include="gettext.py" /> - <Compile Include="glob.py" /> - <Compile Include="graphlib.py" /> - <Compile Include="gzip.py" /> - <Compile Include="hashlib.py" /> - <Compile Include="heapq.py" /> - <Compile Include="hmac.py" /> - <Compile Include="html\entities.py" /> - <Compile Include="html\parser.py" /> - <Compile Include="html\__init__.py" /> - <Compile Include="http\client.py" /> - <Compile Include="http\cookiejar.py" /> - <Compile Include="http\cookies.py" /> - <Compile Include="http\server.py" /> - <Compile 
Include="http\__init__.py" /> - <Compile Include="idlelib\autocomplete.py" /> - <Compile Include="idlelib\autocomplete_w.py" /> - <Compile Include="idlelib\autoexpand.py" /> - <Compile Include="idlelib\browser.py" /> - <Compile Include="idlelib\calltips.py" /> - <Compile Include="idlelib\calltip_w.py" /> - <Compile Include="idlelib\codecontext.py" /> - <Compile Include="idlelib\colorizer.py" /> - <Compile Include="idlelib\config.py" /> - <Compile Include="idlelib\configdialog.py" /> - <Compile Include="idlelib\config_key.py" /> - <Compile Include="idlelib\debugger.py" /> - <Compile Include="idlelib\debugger_r.py" /> - <Compile Include="idlelib\debugobj.py" /> - <Compile Include="idlelib\debugobj_r.py" /> - <Compile Include="idlelib\delegator.py" /> - <Compile Include="idlelib\dynoption.py" /> - <Compile Include="idlelib\editor.py" /> - <Compile Include="idlelib\filelist.py" /> - <Compile Include="idlelib\grep.py" /> - <Compile Include="idlelib\help.py" /> - <Compile Include="idlelib\help_about.py" /> - <Compile Include="idlelib\history.py" /> - <Compile Include="idlelib\hyperparser.py" /> - <Compile Include="idlelib\idle.py" /> - <Compile Include="idlelib\idle.pyw" /> - <Compile Include="idlelib\idle_test\htest.py" /> - <Compile Include="idlelib\idle_test\mock_idle.py" /> - <Compile Include="idlelib\idle_test\mock_tk.py" /> - <Compile Include="idlelib\idle_test\test_autocomplete.py" /> - <Compile Include="idlelib\idle_test\test_autoexpand.py" /> - <Compile Include="idlelib\idle_test\test_calltips.py" /> - <Compile Include="idlelib\idle_test\test_colorizer.py" /> - <Compile Include="idlelib\idle_test\test_config.py" /> - <Compile Include="idlelib\idle_test\test_configdialog.py" /> - <Compile Include="idlelib\idle_test\test_config_key.py" /> - <Compile Include="idlelib\idle_test\test_debugger.py" /> - <Compile Include="idlelib\idle_test\test_delegator.py" /> - <Compile Include="idlelib\idle_test\test_editmenu.py" /> - <Compile Include="idlelib\idle_test\test_editor.py" /> - <Compile Include="idlelib\idle_test\test_grep.py" /> - <Compile Include="idlelib\idle_test\test_help.py" /> - <Compile Include="idlelib\idle_test\test_help_about.py" /> - <Compile Include="idlelib\idle_test\test_history.py" /> - <Compile Include="idlelib\idle_test\test_hyperparser.py" /> - <Compile Include="idlelib\idle_test\test_iomenu.py" /> - <Compile Include="idlelib\idle_test\test_macosx.py" /> - <Compile Include="idlelib\idle_test\test_paragraph.py" /> - <Compile Include="idlelib\idle_test\test_parenmatch.py" /> - <Compile Include="idlelib\idle_test\test_pathbrowser.py" /> - <Compile Include="idlelib\idle_test\test_percolator.py" /> - <Compile Include="idlelib\idle_test\test_query.py" /> - <Compile Include="idlelib\idle_test\test_redirector.py" /> - <Compile Include="idlelib\idle_test\test_replace.py" /> - <Compile Include="idlelib\idle_test\test_rstrip.py" /> - <Compile Include="idlelib\idle_test\test_scrolledlist.py" /> - <Compile Include="idlelib\idle_test\test_search.py" /> - <Compile Include="idlelib\idle_test\test_searchbase.py" /> - <Compile Include="idlelib\idle_test\test_searchengine.py" /> - <Compile Include="idlelib\idle_test\test_text.py" /> - <Compile Include="idlelib\idle_test\test_textview.py" /> - <Compile Include="idlelib\idle_test\test_tree.py" /> - <Compile Include="idlelib\idle_test\test_undo.py" /> - <Compile Include="idlelib\idle_test\test_warning.py" /> - <Compile Include="idlelib\idle_test\__init__.py" /> - <Compile Include="idlelib\iomenu.py" /> - <Compile Include="idlelib\macosx.py" /> - 
<Compile Include="idlelib\mainmenu.py" /> - <Compile Include="idlelib\multicall.py" /> - <Compile Include="idlelib\outwin.py" /> - <Compile Include="idlelib\paragraph.py" /> - <Compile Include="idlelib\parenmatch.py" /> - <Compile Include="idlelib\pathbrowser.py" /> - <Compile Include="idlelib\percolator.py" /> - <Compile Include="idlelib\pyparse.py" /> - <Compile Include="idlelib\pyshell.py" /> - <Compile Include="idlelib\query.py" /> - <Compile Include="idlelib\redirector.py" /> - <Compile Include="idlelib\replace.py" /> - <Compile Include="idlelib\rpc.py" /> - <Compile Include="idlelib\rstrip.py" /> - <Compile Include="idlelib\run.py" /> - <Compile Include="idlelib\runscript.py" /> - <Compile Include="idlelib\scrolledlist.py" /> - <Compile Include="idlelib\search.py" /> - <Compile Include="idlelib\searchbase.py" /> - <Compile Include="idlelib\searchengine.py" /> - <Compile Include="idlelib\stackviewer.py" /> - <Compile Include="idlelib\statusbar.py" /> - <Compile Include="idlelib\tabbedpages.py" /> - <Compile Include="idlelib\textview.py" /> - <Compile Include="idlelib\tooltip.py" /> - <Compile Include="idlelib\tree.py" /> - <Compile Include="idlelib\undo.py" /> - <Compile Include="idlelib\windows.py" /> - <Compile Include="idlelib\zoomheight.py" /> - <Compile Include="idlelib\__init__.py" /> - <Compile Include="idlelib\__main__.py" /> - <Compile Include="imaplib.py" /> - <Compile Include="imghdr.py" /> - <Compile Include="imp.py" /> - <Compile Include="importlib\abc.py" /> - <Compile Include="importlib\machinery.py" /> - <Compile Include="importlib\util.py" /> - <Compile Include="importlib\_bootstrap.py" /> - <Compile Include="importlib\_bootstrap_external.py" /> - <Compile Include="importlib\__init__.py" /> - <Compile Include="inspect.py" /> - <Compile Include="io.py" /> - <Compile Include="ipaddress.py" /> - <Compile Include="json\decoder.py" /> - <Compile Include="json\encoder.py" /> - <Compile Include="json\scanner.py" /> - <Compile Include="json\tool.py" /> - <Compile Include="json\__init__.py" /> - <Compile Include="keyword.py" /> - <Compile Include="lib2to3\btm_matcher.py" /> - <Compile Include="lib2to3\btm_utils.py" /> - <Compile Include="lib2to3\fixer_base.py" /> - <Compile Include="lib2to3\fixer_util.py" /> - <Compile Include="lib2to3\fixes\fix_apply.py" /> - <Compile Include="lib2to3\fixes\fix_asserts.py" /> - <Compile Include="lib2to3\fixes\fix_basestring.py" /> - <Compile Include="lib2to3\fixes\fix_buffer.py" /> - <Compile Include="lib2to3\fixes\fix_dict.py" /> - <Compile Include="lib2to3\fixes\fix_except.py" /> - <Compile Include="lib2to3\fixes\fix_exec.py" /> - <Compile Include="lib2to3\fixes\fix_execfile.py" /> - <Compile Include="lib2to3\fixes\fix_exitfunc.py" /> - <Compile Include="lib2to3\fixes\fix_filter.py" /> - <Compile Include="lib2to3\fixes\fix_funcattrs.py" /> - <Compile Include="lib2to3\fixes\fix_future.py" /> - <Compile Include="lib2to3\fixes\fix_getcwdu.py" /> - <Compile Include="lib2to3\fixes\fix_has_key.py" /> - <Compile Include="lib2to3\fixes\fix_idioms.py" /> - <Compile Include="lib2to3\fixes\fix_import.py" /> - <Compile Include="lib2to3\fixes\fix_imports.py" /> - <Compile Include="lib2to3\fixes\fix_imports2.py" /> - <Compile Include="lib2to3\fixes\fix_input.py" /> - <Compile Include="lib2to3\fixes\fix_intern.py" /> - <Compile Include="lib2to3\fixes\fix_isinstance.py" /> - <Compile Include="lib2to3\fixes\fix_itertools.py" /> - <Compile Include="lib2to3\fixes\fix_itertools_imports.py" /> - <Compile Include="lib2to3\fixes\fix_long.py" /> - <Compile 
Include="lib2to3\fixes\fix_map.py" /> - <Compile Include="lib2to3\fixes\fix_metaclass.py" /> - <Compile Include="lib2to3\fixes\fix_methodattrs.py" /> - <Compile Include="lib2to3\fixes\fix_ne.py" /> - <Compile Include="lib2to3\fixes\fix_next.py" /> - <Compile Include="lib2to3\fixes\fix_nonzero.py" /> - <Compile Include="lib2to3\fixes\fix_numliterals.py" /> - <Compile Include="lib2to3\fixes\fix_operator.py" /> - <Compile Include="lib2to3\fixes\fix_paren.py" /> - <Compile Include="lib2to3\fixes\fix_print.py" /> - <Compile Include="lib2to3\fixes\fix_raise.py" /> - <Compile Include="lib2to3\fixes\fix_raw_input.py" /> - <Compile Include="lib2to3\fixes\fix_reduce.py" /> - <Compile Include="lib2to3\fixes\fix_reload.py" /> - <Compile Include="lib2to3\fixes\fix_renames.py" /> - <Compile Include="lib2to3\fixes\fix_repr.py" /> - <Compile Include="lib2to3\fixes\fix_set_literal.py" /> - <Compile Include="lib2to3\fixes\fix_standarderror.py" /> - <Compile Include="lib2to3\fixes\fix_sys_exc.py" /> - <Compile Include="lib2to3\fixes\fix_throw.py" /> - <Compile Include="lib2to3\fixes\fix_tuple_params.py" /> - <Compile Include="lib2to3\fixes\fix_types.py" /> - <Compile Include="lib2to3\fixes\fix_unicode.py" /> - <Compile Include="lib2to3\fixes\fix_urllib.py" /> - <Compile Include="lib2to3\fixes\fix_ws_comma.py" /> - <Compile Include="lib2to3\fixes\fix_xrange.py" /> - <Compile Include="lib2to3\fixes\fix_xreadlines.py" /> - <Compile Include="lib2to3\fixes\fix_zip.py" /> - <Compile Include="lib2to3\fixes\__init__.py" /> - <Compile Include="lib2to3\main.py" /> - <Compile Include="lib2to3\patcomp.py" /> - <Compile Include="lib2to3\pgen2\conv.py" /> - <Compile Include="lib2to3\pgen2\driver.py" /> - <Compile Include="lib2to3\pgen2\grammar.py" /> - <Compile Include="lib2to3\pgen2\literals.py" /> - <Compile Include="lib2to3\pgen2\parse.py" /> - <Compile Include="lib2to3\pgen2\pgen.py" /> - <Compile Include="lib2to3\pgen2\token.py" /> - <Compile Include="lib2to3\pgen2\tokenize.py" /> - <Compile Include="lib2to3\pgen2\__init__.py" /> - <Compile Include="lib2to3\pygram.py" /> - <Compile Include="lib2to3\pytree.py" /> - <Compile Include="lib2to3\refactor.py" /> - <Compile Include="lib2to3\__init__.py" /> - <Compile Include="lib2to3\__main__.py" /> - <Compile Include="linecache.py" /> - <Compile Include="locale.py" /> - <Compile Include="logging\config.py" /> - <Compile Include="logging\handlers.py" /> - <Compile Include="logging\__init__.py" /> - <Compile Include="lzma.py" /> - <Compile Include="mailbox.py" /> - <Compile Include="mailcap.py" /> - <Compile Include="mimetypes.py" /> - <Compile Include="modulefinder.py" /> - <Compile Include="msilib\schema.py" /> - <Compile Include="msilib\sequence.py" /> - <Compile Include="msilib\text.py" /> - <Compile Include="msilib\__init__.py" /> - <Compile Include="multiprocessing\connection.py" /> - <Compile Include="multiprocessing\context.py" /> - <Compile Include="multiprocessing\dummy\connection.py" /> - <Compile Include="multiprocessing\dummy\__init__.py" /> - <Compile Include="multiprocessing\forkserver.py" /> - <Compile Include="multiprocessing\heap.py" /> - <Compile Include="multiprocessing\managers.py" /> - <Compile Include="multiprocessing\pool.py" /> - <Compile Include="multiprocessing\popen_fork.py" /> - <Compile Include="multiprocessing\popen_forkserver.py" /> - <Compile Include="multiprocessing\popen_spawn_posix.py" /> - <Compile Include="multiprocessing\popen_spawn_win32.py" /> - <Compile Include="multiprocessing\process.py" /> - <Compile 
Include="multiprocessing\queues.py" /> - <Compile Include="multiprocessing\reduction.py" /> - <Compile Include="multiprocessing\resource_sharer.py" /> - <Compile Include="multiprocessing\resource_tracker.py" /> - <Compile Include="multiprocessing\sharedctypes.py" /> - <Compile Include="multiprocessing\spawn.py" /> - <Compile Include="multiprocessing\synchronize.py" /> - <Compile Include="multiprocessing\util.py" /> - <Compile Include="multiprocessing\__init__.py" /> - <Compile Include="netrc.py" /> - <Compile Include="nntplib.py" /> - <Compile Include="ntpath.py" /> - <Compile Include="nturl2path.py" /> - <Compile Include="numbers.py" /> - <Compile Include="opcode.py" /> - <Compile Include="operator.py" /> - <Compile Include="optparse.py" /> - <Compile Include="os.py" /> - <Compile Include="pathlib.py" /> - <Compile Include="pdb.py" /> - <Compile Include="pickle.py" /> - <Compile Include="pickletools.py" /> - <Compile Include="pipes.py" /> - <Compile Include="pkgutil.py" /> - <Compile Include="platform.py" /> - <Compile Include="plistlib.py" /> - <Compile Include="poplib.py" /> - <Compile Include="posixpath.py" /> - <Compile Include="pprint.py" /> - <Compile Include="profile.py" /> - <Compile Include="pstats.py" /> - <Compile Include="pty.py" /> - <Compile Include="pyclbr.py" /> - <Compile Include="pydoc.py" /> - <Compile Include="pydoc_data\topics.py" /> - <Compile Include="pydoc_data\__init__.py" /> - <Compile Include="py_compile.py" /> - <Compile Include="queue.py" /> - <Compile Include="quopri.py" /> - <Compile Include="random.py" /> - <Compile Include="re.py" /> - <Compile Include="reprlib.py" /> - <Compile Include="rlcompleter.py" /> - <Compile Include="runpy.py" /> - <Compile Include="sched.py" /> - <Compile Include="secrets.py" /> - <Compile Include="selectors.py" /> - <Compile Include="shelve.py" /> - <Compile Include="shlex.py" /> - <Compile Include="shutil.py" /> - <Compile Include="signal.py" /> - <Compile Include="site.py" /> - <Compile Include="smtplib.py" /> - <Compile Include="sndhdr.py" /> - <Compile Include="socket.py" /> - <Compile Include="socketserver.py" /> - <Compile Include="sqlite3\dbapi2.py" /> - <Compile Include="sqlite3\dump.py" /> - <Compile Include="sqlite3\test\dbapi.py" /> - <Compile Include="sqlite3\test\dump.py" /> - <Compile Include="sqlite3\test\factory.py" /> - <Compile Include="sqlite3\test\hooks.py" /> - <Compile Include="sqlite3\test\regression.py" /> - <Compile Include="sqlite3\test\transactions.py" /> - <Compile Include="sqlite3\test\types.py" /> - <Compile Include="sqlite3\test\userfunctions.py" /> - <Compile Include="sqlite3\test\__init__.py" /> - <Compile Include="sqlite3\__init__.py" /> - <Compile Include="sre_compile.py" /> - <Compile Include="sre_constants.py" /> - <Compile Include="sre_parse.py" /> - <Compile Include="ssl.py" /> - <Compile Include="stat.py" /> - <Compile Include="statistics.py" /> - <Compile Include="string.py" /> - <Compile Include="stringprep.py" /> - <Compile Include="struct.py" /> - <Compile Include="subprocess.py" /> - <Compile Include="sunau.py" /> - <Compile Include="symbol.py" /> - <Compile Include="symtable.py" /> - <Compile Include="sysconfig.py" /> - <Compile Include="tabnanny.py" /> - <Compile Include="tarfile.py" /> - <Compile Include="telnetlib.py" /> - <Compile Include="tempfile.py" /> - <Compile Include="test\ann_module.py" /> - <Compile Include="test\ann_module2.py" /> - <Compile Include="test\ann_module3.py" /> - <Compile Include="test\audiotests.py" /> - <Compile Include="test\autotest.py" /> - <Compile 
Include="test\badsyntax_3131.py" /> - <Compile Include="test\badsyntax_future10.py" /> - <Compile Include="test\badsyntax_future3.py" /> - <Compile Include="test\badsyntax_future4.py" /> - <Compile Include="test\badsyntax_future5.py" /> - <Compile Include="test\badsyntax_future6.py" /> - <Compile Include="test\badsyntax_future7.py" /> - <Compile Include="test\badsyntax_future8.py" /> - <Compile Include="test\badsyntax_future9.py" /> - <Compile Include="test\badsyntax_pep3120.py" /> - <Compile Include="test\bad_coding.py" /> - <Compile Include="test\bad_coding2.py" /> - <Compile Include="test\bytecode_helper.py" /> - <Compile Include="test\coding20731.py" /> - <Compile Include="test\crashers\bogus_code_obj.py" /> - <Compile Include="test\crashers\gc_inspection.py" /> - <Compile Include="test\crashers\infinite_loop_re.py" /> - <Compile Include="test\crashers\mutation_inside_cyclegc.py" /> - <Compile Include="test\crashers\recursive_call.py" /> - <Compile Include="test\crashers\trace_at_recursion_limit.py" /> - <Compile Include="test\crashers\underlying_dict.py" /> - <Compile Include="test\curses_tests.py" /> - <Compile Include="test\datetimetester.py" /> - <Compile Include="test\dis_module.py" /> - <Compile Include="test\doctest_aliases.py" /> - <Compile Include="test\double_const.py" /> - <Compile Include="test\dtracedata\call_stack.py" /> - <Compile Include="test\dtracedata\gc.py" /> - <Compile Include="test\dtracedata\instance.py" /> - <Compile Include="test\dtracedata\line.py" /> - <Compile Include="test\eintrdata\eintr_tester.py" /> - <Compile Include="test\encoded_modules\module_iso_8859_1.py" /> - <Compile Include="test\encoded_modules\module_koi8_r.py" /> - <Compile Include="test\encoded_modules\__init__.py" /> - <Compile Include="test\final_a.py" /> - <Compile Include="test\final_b.py" /> - <Compile Include="test\fork_wait.py" /> - <Compile Include="test\future_test1.py" /> - <Compile Include="test\future_test2.py" /> - <Compile Include="test\gdb_sample.py" /> - <Compile Include="test\imp_dummy.py" /> - <Compile Include="test\inspect_fodder.py" /> - <Compile Include="test\inspect_fodder2.py" /> - <Compile Include="test\leakers\test_ctypes.py" /> - <Compile Include="test\leakers\test_selftype.py" /> - <Compile Include="test\leakers\__init__.py" /> - <Compile Include="test\libregrtest\cmdline.py" /> - <Compile Include="test\libregrtest\main.py" /> - <Compile Include="test\libregrtest\refleak.py" /> - <Compile Include="test\libregrtest\runtest.py" /> - <Compile Include="test\libregrtest\runtest_mp.py" /> - <Compile Include="test\libregrtest\save_env.py" /> - <Compile Include="test\libregrtest\setup.py" /> - <Compile Include="test\libregrtest\__init__.py" /> - <Compile Include="test\list_tests.py" /> - <Compile Include="test\lock_tests.py" /> - <Compile Include="test\make_ssl_certs.py" /> - <Compile Include="test\mapping_tests.py" /> - <Compile Include="test\memory_watchdog.py" /> - <Compile Include="test\mock_socket.py" /> - <Compile Include="test\mod_generics_cache.py" /> - <Compile Include="test\mp_fork_bomb.py" /> - <Compile Include="test\mp_preload.py" /> - <Compile Include="test\multibytecodec_support.py" /> - <Compile Include="test\pickletester.py" /> - <Compile Include="test\profilee.py" /> - <Compile Include="test\pyclbr_input.py" /> - <Compile Include="test\pydocfodder.py" /> - <Compile Include="test\pydoc_mod.py" /> - <Compile Include="test\regrtest.py" /> - <Compile Include="test\relimport.py" /> - <Compile Include="test\reperf.py" /> - <Compile Include="test\re_tests.py" /> 
- <Compile Include="test\sample_doctest.py" /> - <Compile Include="test\sample_doctest_no_docstrings.py" /> - <Compile Include="test\sample_doctest_no_doctests.py" /> - <Compile Include="test\seq_tests.py" /> - <Compile Include="test\signalinterproctester.py" /> - <Compile Include="test\smtpd.py" /> - <Compile Include="test\sortperf.py" /> - <Compile Include="test\ssltests.py" /> - <Compile Include="test\ssl_servers.py" /> - <Compile Include="test\string_tests.py" /> - <Compile Include="test\subprocessdata\fd_status.py" /> - <Compile Include="test\subprocessdata\input_reader.py" /> - <Compile Include="test\subprocessdata\qcat.py" /> - <Compile Include="test\subprocessdata\qgrep.py" /> - <Compile Include="test\subprocessdata\sigchild_ignore.py" /> - <Compile Include="test\support\script_helper.py" /> - <Compile Include="test\support\__init__.py" /> - <Compile Include="test\testcodec.py" /> - <Compile Include="test\test_abc.py" /> - <Compile Include="test\test_abstract_numbers.py" /> - <Compile Include="test\test_aifc.py" /> - <Compile Include="test\test_argparse.py" /> - <Compile Include="test\test_array.py" /> - <Compile Include="test\test_asdl_parser.py" /> - <Compile Include="test\test_ast.py" /> - <Compile Include="test\test_asyncgen.py" /> - <Compile Include="test\test_asynchat.py" /> - <Compile Include="test\test_asyncio\echo.py" /> - <Compile Include="test\test_asyncio\echo2.py" /> - <Compile Include="test\test_asyncio\echo3.py" /> - <Compile Include="test\test_asyncio\test_base_events.py" /> - <Compile Include="test\test_asyncio\test_events.py" /> - <Compile Include="test\test_asyncio\test_futures.py" /> - <Compile Include="test\test_asyncio\test_locks.py" /> - <Compile Include="test\test_asyncio\test_pep492.py" /> - <Compile Include="test\test_asyncio\test_proactor_events.py" /> - <Compile Include="test\test_asyncio\test_queues.py" /> - <Compile Include="test\test_asyncio\test_selector_events.py" /> - <Compile Include="test\test_asyncio\test_sslproto.py" /> - <Compile Include="test\test_asyncio\test_streams.py" /> - <Compile Include="test\test_asyncio\test_subprocess.py" /> - <Compile Include="test\test_asyncio\test_tasks.py" /> - <Compile Include="test\test_asyncio\test_transports.py" /> - <Compile Include="test\test_asyncio\test_unix_events.py" /> - <Compile Include="test\test_asyncio\test_windows_events.py" /> - <Compile Include="test\test_asyncio\test_windows_utils.py" /> - <Compile Include="test\test_asyncio\__init__.py" /> - <Compile Include="test\test_asyncio\__main__.py" /> - <Compile Include="test\test_asyncore.py" /> - <Compile Include="test\test_atexit.py" /> - <Compile Include="test\test_audioop.py" /> - <Compile Include="test\test_augassign.py" /> - <Compile Include="test\test_base64.py" /> - <Compile Include="test\test_baseexception.py" /> - <Compile Include="test\test_bigaddrspace.py" /> - <Compile Include="test\test_bigmem.py" /> - <Compile Include="test\test_binascii.py" /> - <Compile Include="test\test_binop.py" /> - <Compile Include="test\test_bisect.py" /> - <Compile Include="test\test_bool.py" /> - <Compile Include="test\test_buffer.py" /> - <Compile Include="test\test_bufio.py" /> - <Compile Include="test\test_builtin.py" /> - <Compile Include="test\test_bytes.py" /> - <Compile Include="test\test_bz2.py" /> - <Compile Include="test\test_calendar.py" /> - <Compile Include="test\test_call.py" /> - <Compile Include="test\test_capi.py" /> - <Compile Include="test\test_cgi.py" /> - <Compile Include="test\test_cgitb.py" /> - <Compile 
Include="test\test_charmapcodec.py" /> - <Compile Include="test\test_class.py" /> - <Compile Include="test\test_cmath.py" /> - <Compile Include="test\test_cmd.py" /> - <Compile Include="test\test_cmd_line.py" /> - <Compile Include="test\test_cmd_line_script.py" /> - <Compile Include="test\test_code.py" /> - <Compile Include="test\test_codeccallbacks.py" /> - <Compile Include="test\test_codecencodings_cn.py" /> - <Compile Include="test\test_codecencodings_hk.py" /> - <Compile Include="test\test_codecencodings_iso2022.py" /> - <Compile Include="test\test_codecencodings_jp.py" /> - <Compile Include="test\test_codecencodings_kr.py" /> - <Compile Include="test\test_codecencodings_tw.py" /> - <Compile Include="test\test_codecmaps_cn.py" /> - <Compile Include="test\test_codecmaps_hk.py" /> - <Compile Include="test\test_codecmaps_jp.py" /> - <Compile Include="test\test_codecmaps_kr.py" /> - <Compile Include="test\test_codecmaps_tw.py" /> - <Compile Include="test\test_codecs.py" /> - <Compile Include="test\test_codeop.py" /> - <Compile Include="test\test_code_module.py" /> - <Compile Include="test\test_collections.py" /> - <Compile Include="test\test_colorsys.py" /> - <Compile Include="test\test_compare.py" /> - <Compile Include="test\test_compile.py" /> - <Compile Include="test\test_compileall.py" /> - <Compile Include="test\test_complex.py" /> - <Compile Include="test\test_concurrent_futures.py" /> - <Compile Include="test\test_configparser.py" /> - <Compile Include="test\test_contains.py" /> - <Compile Include="test\test_contextlib.py" /> - <Compile Include="test\test_contextlib_async.py" /> - <Compile Include="test\test_copy.py" /> - <Compile Include="test\test_copyreg.py" /> - <Compile Include="test\test_coroutines.py" /> - <Compile Include="test\test_cprofile.py" /> - <Compile Include="test\test_crashers.py" /> - <Compile Include="test\test_crypt.py" /> - <Compile Include="test\test_csv.py" /> - <Compile Include="test\test_ctypes\test_anon.py" /> - <Compile Include="test\test_ctypes\test_arrays.py" /> - <Compile Include="test\test_ctypes\test_array_in_pointer.py" /> - <Compile Include="test\test_ctypes\test_as_parameter.py" /> - <Compile Include="test\test_ctypes\test_bitfields.py" /> - <Compile Include="test\test_ctypes\test_buffers.py" /> - <Compile Include="test\test_ctypes\test_bytes.py" /> - <Compile Include="test\test_ctypes\test_byteswap.py" /> - <Compile Include="test\test_ctypes\test_callbacks.py" /> - <Compile Include="test\test_ctypes\test_cast.py" /> - <Compile Include="test\test_ctypes\test_cfuncs.py" /> - <Compile Include="test\test_ctypes\test_checkretval.py" /> - <Compile Include="test\test_ctypes\test_delattr.py" /> - <Compile Include="test\test_ctypes\test_errno.py" /> - <Compile Include="test\test_ctypes\test_find.py" /> - <Compile Include="test\test_ctypes\test_frombuffer.py" /> - <Compile Include="test\test_ctypes\test_funcptr.py" /> - <Compile Include="test\test_ctypes\test_functions.py" /> - <Compile Include="test\test_ctypes\test_incomplete.py" /> - <Compile Include="test\test_ctypes\test_init.py" /> - <Compile Include="test\test_ctypes\test_internals.py" /> - <Compile Include="test\test_ctypes\test_keeprefs.py" /> - <Compile Include="test\test_ctypes\test_libc.py" /> - <Compile Include="test\test_ctypes\test_loading.py" /> - <Compile Include="test\test_ctypes\test_macholib.py" /> - <Compile Include="test\test_ctypes\test_memfunctions.py" /> - <Compile Include="test\test_ctypes\test_numbers.py" /> - <Compile Include="test\test_ctypes\test_objects.py" /> - <Compile 
Include="test\test_ctypes\test_parameters.py" /> - <Compile Include="test\test_ctypes\test_pep3118.py" /> - <Compile Include="test\test_ctypes\test_pickling.py" /> - <Compile Include="test\test_ctypes\test_pointers.py" /> - <Compile Include="test\test_ctypes\test_prototypes.py" /> - <Compile Include="test\test_ctypes\test_python_api.py" /> - <Compile Include="test\test_ctypes\test_random_things.py" /> - <Compile Include="test\test_ctypes\test_refcounts.py" /> - <Compile Include="test\test_ctypes\test_repr.py" /> - <Compile Include="test\test_ctypes\test_returnfuncptrs.py" /> - <Compile Include="test\test_ctypes\test_simplesubclasses.py" /> - <Compile Include="test\test_ctypes\test_sizes.py" /> - <Compile Include="test\test_ctypes\test_slicing.py" /> - <Compile Include="test\test_ctypes\test_stringptr.py" /> - <Compile Include="test\test_ctypes\test_strings.py" /> - <Compile Include="test\test_ctypes\test_structures.py" /> - <Compile Include="test\test_ctypes\test_struct_fields.py" /> - <Compile Include="test\test_ctypes\test_unaligned_structures.py" /> - <Compile Include="test\test_ctypes\test_unicode.py" /> - <Compile Include="test\test_ctypes\test_values.py" /> - <Compile Include="test\test_ctypes\test_varsize_struct.py" /> - <Compile Include="test\test_ctypes\test_win32.py" /> - <Compile Include="test\test_ctypes\test_wintypes.py" /> - <Compile Include="test\test_ctypes\__init__.py" /> - <Compile Include="test\test_ctypes\__main__.py" /> - <Compile Include="test\test_curses.py" /> - <Compile Include="test\test_datetime.py" /> - <Compile Include="test\test_dbm.py" /> - <Compile Include="test\test_dbm_dumb.py" /> - <Compile Include="test\test_dbm_gnu.py" /> - <Compile Include="test\test_dbm_ndbm.py" /> - <Compile Include="test\test_decimal.py" /> - <Compile Include="test\test_decorators.py" /> - <Compile Include="test\test_defaultdict.py" /> - <Compile Include="test\test_deque.py" /> - <Compile Include="test\test_descr.py" /> - <Compile Include="test\test_descrtut.py" /> - <Compile Include="test\test_devpoll.py" /> - <Compile Include="test\test_dict.py" /> - <Compile Include="test\test_dictcomps.py" /> - <Compile Include="test\test_dictviews.py" /> - <Compile Include="test\test_dict_version.py" /> - <Compile Include="test\test_difflib.py" /> - <Compile Include="test\test_dis.py" /> - <Compile Include="test\test_distutils.py" /> - <Compile Include="test\test_doctest.py" /> - <Compile Include="test\test_doctest2.py" /> - <Compile Include="test\test_docxmlrpc.py" /> - <Compile Include="test\test_dtrace.py" /> - <Compile Include="test\test_dynamic.py" /> - <Compile Include="test\test_dynamicclassattribute.py" /> - <Compile Include="test\test_eintr.py" /> - <Compile Include="test\test_email\test_asian_codecs.py" /> - <Compile Include="test\test_email\test_contentmanager.py" /> - <Compile Include="test\test_email\test_defect_handling.py" /> - <Compile Include="test\test_email\test_email.py" /> - <Compile Include="test\test_email\test_generator.py" /> - <Compile Include="test\test_email\test_headerregistry.py" /> - <Compile Include="test\test_email\test_inversion.py" /> - <Compile Include="test\test_email\test_message.py" /> - <Compile Include="test\test_email\test_parser.py" /> - <Compile Include="test\test_email\test_pickleable.py" /> - <Compile Include="test\test_email\test_policy.py" /> - <Compile Include="test\test_email\test_utils.py" /> - <Compile Include="test\test_email\test__encoded_words.py" /> - <Compile Include="test\test_email\test__header_value_parser.py" /> - <Compile 
Include="test\test_email\torture_test.py" /> - <Compile Include="test\test_email\__init__.py" /> - <Compile Include="test\test_email\__main__.py" /> - <Compile Include="test\test_ensurepip.py" /> - <Compile Include="test\test_enum.py" /> - <Compile Include="test\test_enumerate.py" /> - <Compile Include="test\test_eof.py" /> - <Compile Include="test\test_epoll.py" /> - <Compile Include="test\test_errno.py" /> - <Compile Include="test\test_exceptions.py" /> - <Compile Include="test\test_exception_hierarchy.py" /> - <Compile Include="test\test_exception_variations.py" /> - <Compile Include="test\test_extcall.py" /> - <Compile Include="test\test_faulthandler.py" /> - <Compile Include="test\test_fcntl.py" /> - <Compile Include="test\test_file.py" /> - <Compile Include="test\test_filecmp.py" /> - <Compile Include="test\test_fileinput.py" /> - <Compile Include="test\test_fileio.py" /> - <Compile Include="test\test_file_eintr.py" /> - <Compile Include="test\test_finalization.py" /> - <Compile Include="test\test_float.py" /> - <Compile Include="test\test_flufl.py" /> - <Compile Include="test\test_fnmatch.py" /> - <Compile Include="test\test_fork1.py" /> - <Compile Include="test\test_format.py" /> - <Compile Include="test\test_fractions.py" /> - <Compile Include="test\test_frame.py" /> - <Compile Include="test\test_fstring.py" /> - <Compile Include="test\test_ftplib.py" /> - <Compile Include="test\test_funcattrs.py" /> - <Compile Include="test\test_functools.py" /> - <Compile Include="test\test_future.py" /> - <Compile Include="test\test_future3.py" /> - <Compile Include="test\test_future4.py" /> - <Compile Include="test\test_future5.py" /> - <Compile Include="test\test_gc.py" /> - <Compile Include="test\test_gdb.py" /> - <Compile Include="test\test_generators.py" /> - <Compile Include="test\test_generator_stop.py" /> - <Compile Include="test\test_genericpath.py" /> - <Compile Include="test\test_genexps.py" /> - <Compile Include="test\test_getargs2.py" /> - <Compile Include="test\test_getopt.py" /> - <Compile Include="test\test_getpass.py" /> - <Compile Include="test\test_gettext.py" /> - <Compile Include="test\test_glob.py" /> - <Compile Include="test\test_global.py" /> - <Compile Include="test\test_grammar.py" /> - <Compile Include="test\test_grp.py" /> - <Compile Include="test\test_gzip.py" /> - <Compile Include="test\test_hash.py" /> - <Compile Include="test\test_hashlib.py" /> - <Compile Include="test\test_heapq.py" /> - <Compile Include="test\test_hmac.py" /> - <Compile Include="test\test_html.py" /> - <Compile Include="test\test_htmlparser.py" /> - <Compile Include="test\test_httplib.py" /> - <Compile Include="test\test_httpservers.py" /> - <Compile Include="test\test_http_cookiejar.py" /> - <Compile Include="test\test_http_cookies.py" /> - <Compile Include="test\test_idle.py" /> - <Compile Include="test\test_imaplib.py" /> - <Compile Include="test\test_imghdr.py" /> - <Compile Include="test\test_imp.py" /> - <Compile Include="test\test_importlib\abc.py" /> - <Compile Include="test\test_importlib\builtin\test_finder.py" /> - <Compile Include="test\test_importlib\builtin\test_loader.py" /> - <Compile Include="test\test_importlib\builtin\__init__.py" /> - <Compile Include="test\test_importlib\builtin\__main__.py" /> - <Compile Include="test\test_importlib\extension\test_case_sensitivity.py" /> - <Compile Include="test\test_importlib\extension\test_finder.py" /> - <Compile Include="test\test_importlib\extension\test_loader.py" /> - <Compile 
Include="test\test_importlib\extension\test_path_hook.py" /> - <Compile Include="test\test_importlib\extension\__init__.py" /> - <Compile Include="test\test_importlib\extension\__main__.py" /> - <Compile Include="test\test_importlib\frozen\test_finder.py" /> - <Compile Include="test\test_importlib\frozen\test_loader.py" /> - <Compile Include="test\test_importlib\frozen\__init__.py" /> - <Compile Include="test\test_importlib\frozen\__main__.py" /> - <Compile Include="test\test_importlib\import_\test_api.py" /> - <Compile Include="test\test_importlib\import_\test_caching.py" /> - <Compile Include="test\test_importlib\import_\test_fromlist.py" /> - <Compile Include="test\test_importlib\import_\test_meta_path.py" /> - <Compile Include="test\test_importlib\import_\test_packages.py" /> - <Compile Include="test\test_importlib\import_\test_path.py" /> - <Compile Include="test\test_importlib\import_\test_relative_imports.py" /> - <Compile Include="test\test_importlib\import_\test___loader__.py" /> - <Compile Include="test\test_importlib\import_\test___package__.py" /> - <Compile Include="test\test_importlib\import_\__init__.py" /> - <Compile Include="test\test_importlib\import_\__main__.py" /> - <Compile Include="test\test_importlib\namespace_pkgs\both_portions\foo\one.py" /> - <Compile Include="test\test_importlib\namespace_pkgs\both_portions\foo\two.py" /> - <Compile Include="test\test_importlib\namespace_pkgs\module_and_namespace_package\a_test.py" /> - <Compile Include="test\test_importlib\namespace_pkgs\not_a_namespace_pkg\foo\one.py" /> - <Compile Include="test\test_importlib\namespace_pkgs\not_a_namespace_pkg\foo\__init__.py" /> - <Compile Include="test\test_importlib\namespace_pkgs\portion1\foo\one.py" /> - <Compile Include="test\test_importlib\namespace_pkgs\portion2\foo\two.py" /> - <Compile Include="test\test_importlib\namespace_pkgs\project1\parent\child\one.py" /> - <Compile Include="test\test_importlib\namespace_pkgs\project2\parent\child\two.py" /> - <Compile Include="test\test_importlib\namespace_pkgs\project3\parent\child\three.py" /> - <Compile Include="test\test_importlib\source\test_case_sensitivity.py" /> - <Compile Include="test\test_importlib\source\test_file_loader.py" /> - <Compile Include="test\test_importlib\source\test_finder.py" /> - <Compile Include="test\test_importlib\source\test_path_hook.py" /> - <Compile Include="test\test_importlib\source\test_source_encoding.py" /> - <Compile Include="test\test_importlib\source\__init__.py" /> - <Compile Include="test\test_importlib\source\__main__.py" /> - <Compile Include="test\test_importlib\test_abc.py" /> - <Compile Include="test\test_importlib\test_api.py" /> - <Compile Include="test\test_importlib\test_lazy.py" /> - <Compile Include="test\test_importlib\test_locks.py" /> - <Compile Include="test\test_importlib\test_namespace_pkgs.py" /> - <Compile Include="test\test_importlib\test_spec.py" /> - <Compile Include="test\test_importlib\test_util.py" /> - <Compile Include="test\test_importlib\test_windows.py" /> - <Compile Include="test\test_importlib\util.py" /> - <Compile Include="test\test_importlib\__init__.py" /> - <Compile Include="test\test_importlib\__main__.py" /> - <Compile Include="test\test_import\data\circular_imports\basic.py" /> - <Compile Include="test\test_import\data\circular_imports\basic2.py" /> - <Compile Include="test\test_import\data\circular_imports\binding.py" /> - <Compile Include="test\test_import\data\circular_imports\binding2.py" /> - <Compile 
Include="test\test_import\data\circular_imports\indirect.py" /> - <Compile Include="test\test_import\data\circular_imports\rebinding.py" /> - <Compile Include="test\test_import\data\circular_imports\rebinding2.py" /> - <Compile Include="test\test_import\data\circular_imports\subpackage.py" /> - <Compile Include="test\test_import\data\circular_imports\subpkg\subpackage2.py" /> - <Compile Include="test\test_import\data\circular_imports\subpkg\util.py" /> - <Compile Include="test\test_import\data\circular_imports\util.py" /> - <Compile Include="test\test_import\data\package\__init__.py" /> - <Compile Include="test\test_import\data\package\submodule.py" /> - <Compile Include="test\test_import\data\package2\submodule1.py" /> - <Compile Include="test\test_import\data\package2\submodule2.py" /> - <Compile Include="test\test_import\data\unwritable\__init__.py" /> - <Compile Include="test\test_import\data\unwritable\x.py" /> - <Compile Include="test\test_import\__init__.py" /> - <Compile Include="test\test_import\__main__.py" /> - <Compile Include="test\test_index.py" /> - <Compile Include="test\test_inspect.py" /> - <Compile Include="test\test_int.py" /> - <Compile Include="test\test_int_literal.py" /> - <Compile Include="test\test_io.py" /> - <Compile Include="test\test_ioctl.py" /> - <Compile Include="test\test_ipaddress.py" /> - <Compile Include="test\test_isinstance.py" /> - <Compile Include="test\test_iter.py" /> - <Compile Include="test\test_iterlen.py" /> - <Compile Include="test\test_itertools.py" /> - <Compile Include="test\test_json\test_decode.py" /> - <Compile Include="test\test_json\test_default.py" /> - <Compile Include="test\test_json\test_dump.py" /> - <Compile Include="test\test_json\test_encode_basestring_ascii.py" /> - <Compile Include="test\test_json\test_enum.py" /> - <Compile Include="test\test_json\test_fail.py" /> - <Compile Include="test\test_json\test_float.py" /> - <Compile Include="test\test_json\test_indent.py" /> - <Compile Include="test\test_json\test_pass1.py" /> - <Compile Include="test\test_json\test_pass2.py" /> - <Compile Include="test\test_json\test_pass3.py" /> - <Compile Include="test\test_json\test_recursion.py" /> - <Compile Include="test\test_json\test_scanstring.py" /> - <Compile Include="test\test_json\test_separators.py" /> - <Compile Include="test\test_json\test_speedups.py" /> - <Compile Include="test\test_json\test_tool.py" /> - <Compile Include="test\test_json\test_unicode.py" /> - <Compile Include="test\test_json\__init__.py" /> - <Compile Include="test\test_json\__main__.py" /> - <Compile Include="test\test_keyword.py" /> - <Compile Include="test\test_keywordonlyarg.py" /> - <Compile Include="test\test_kqueue.py" /> - <Compile Include="test\test_largefile.py" /> - <Compile Include="test\test_lib2to3\data\bom.py" /> - <Compile Include="test\test_lib2to3\data\crlf.py" /> - <Compile Include="test\test_lib2to3\data\different_encoding.py" /> - <Compile Include="test\test_lib2to3\data\false_encoding.py" /> - <Compile Include="test\test_lib2to3\data\fixers\bad_order.py" /> - <Compile Include="test\test_lib2to3\data\fixers\myfixes\fix_explicit.py" /> - <Compile Include="test\test_lib2to3\data\fixers\myfixes\fix_first.py" /> - <Compile Include="test\test_lib2to3\data\fixers\myfixes\fix_last.py" /> - <Compile Include="test\test_lib2to3\data\fixers\myfixes\fix_parrot.py" /> - <Compile Include="test\test_lib2to3\data\fixers\myfixes\fix_preorder.py" /> - <Compile Include="test\test_lib2to3\data\fixers\myfixes\__init__.py" /> - <Compile 
Include="test\test_lib2to3\data\fixers\no_fixer_cls.py" /> - <Compile Include="test\test_lib2to3\data\fixers\parrot_example.py" /> - <Compile Include="test\test_lib2to3\data\infinite_recursion.py" /> - <Compile Include="test\test_lib2to3\data\py2_test_grammar.py" /> - <Compile Include="test\test_lib2to3\data\py3_test_grammar.py" /> - <Compile Include="test\test_lib2to3\pytree_idempotency.py" /> - <Compile Include="test\test_lib2to3\support.py" /> - <Compile Include="test\test_lib2to3\test_all_fixers.py" /> - <Compile Include="test\test_lib2to3\test_fixers.py" /> - <Compile Include="test\test_lib2to3\test_main.py" /> - <Compile Include="test\test_lib2to3\test_parser.py" /> - <Compile Include="test\test_lib2to3\test_pytree.py" /> - <Compile Include="test\test_lib2to3\test_refactor.py" /> - <Compile Include="test\test_lib2to3\test_util.py" /> - <Compile Include="test\test_lib2to3\__init__.py" /> - <Compile Include="test\test_lib2to3\__main__.py" /> - <Compile Include="test\test_linecache.py" /> - <Compile Include="test\test_list.py" /> - <Compile Include="test\test_listcomps.py" /> - <Compile Include="test\test_locale.py" /> - <Compile Include="test\test_logging.py" /> - <Compile Include="test\test_long.py" /> - <Compile Include="test\test_longexp.py" /> - <Compile Include="test\test_lzma.py" /> - <Compile Include="test\test_mailbox.py" /> - <Compile Include="test\test_mailcap.py" /> - <Compile Include="test\test_marshal.py" /> - <Compile Include="test\test_math.py" /> - <Compile Include="test\test_memoryio.py" /> - <Compile Include="test\test_memoryview.py" /> - <Compile Include="test\test_metaclass.py" /> - <Compile Include="test\test_mimetypes.py" /> - <Compile Include="test\test_minidom.py" /> - <Compile Include="test\test_mmap.py" /> - <Compile Include="test\test_module.py" /> - <Compile Include="test\test_modulefinder.py" /> - <Compile Include="test\test_multibytecodec.py" /> - <Compile Include="test\test_multiprocessing_fork.py" /> - <Compile Include="test\test_multiprocessing_forkserver.py" /> - <Compile Include="test\test_multiprocessing_main_handling.py" /> - <Compile Include="test\test_multiprocessing_spawn.py" /> - <Compile Include="test\test_netrc.py" /> - <Compile Include="test\test_nis.py" /> - <Compile Include="test\test_nntplib.py" /> - <Compile Include="test\test_ntpath.py" /> - <Compile Include="test\test_numeric_tower.py" /> - <Compile Include="test\test_opcache.py" /> - <Compile Include="test\test_opcodes.py" /> - <Compile Include="test\test_openpty.py" /> - <Compile Include="test\test_operator.py" /> - <Compile Include="test\test_optparse.py" /> - <Compile Include="test\test_ordered_dict.py" /> - <Compile Include="test\test_os.py" /> - <Compile Include="test\test_ossaudiodev.py" /> - <Compile Include="test\test_osx_env.py" /> - <Compile Include="test\test_parser.py" /> - <Compile Include="test\test_pathlib.py" /> - <Compile Include="test\test_pdb.py" /> - <Compile Include="test\test_peepholer.py" /> - <Compile Include="test\test_peg_generator\__init__.py" /> - <Compile Include="test\test_peg_generator\__main__.py" /> - <Compile Include="test\test_peg_generator\ast_dump.py" /> - <Compile Include="test\test_peg_generator\test_c_parser.py" /> - <Compile Include="test\test_peg_generator\test_first_sets.py" /> - <Compile Include="test\test_peg_generator\test_pegen.py" /> - <Compile Include="test\test_pickle.py" /> - <Compile Include="test\test_pickletools.py" /> - <Compile Include="test\test_pipes.py" /> - <Compile Include="test\test_pkg.py" /> - <Compile 
Include="test\test_pkgimport.py" /> - <Compile Include="test\test_pkgutil.py" /> - <Compile Include="test\test_platform.py" /> - <Compile Include="test\test_plistlib.py" /> - <Compile Include="test\test_poll.py" /> - <Compile Include="test\test_popen.py" /> - <Compile Include="test\test_poplib.py" /> - <Compile Include="test\test_posix.py" /> - <Compile Include="test\test_posixpath.py" /> - <Compile Include="test\test_pow.py" /> - <Compile Include="test\test_pprint.py" /> - <Compile Include="test\test_print.py" /> - <Compile Include="test\test_profile.py" /> - <Compile Include="test\test_property.py" /> - <Compile Include="test\test_pstats.py" /> - <Compile Include="test\test_pty.py" /> - <Compile Include="test\test_pulldom.py" /> - <Compile Include="test\test_pwd.py" /> - <Compile Include="test\test_pyclbr.py" /> - <Compile Include="test\test_pydoc.py" /> - <Compile Include="test\test_pyexpat.py" /> - <Compile Include="test\test_py_compile.py" /> - <Compile Include="test\test_queue.py" /> - <Compile Include="test\test_quopri.py" /> - <Compile Include="test\test_raise.py" /> - <Compile Include="test\test_random.py" /> - <Compile Include="test\test_range.py" /> - <Compile Include="test\test_re.py" /> - <Compile Include="test\test_readline.py" /> - <Compile Include="test\test_regrtest.py" /> - <Compile Include="test\test_reprlib.py" /> - <Compile Include="test\test_resource.py" /> - <Compile Include="test\test_richcmp.py" /> - <Compile Include="test\test_rlcompleter.py" /> - <Compile Include="test\test_robotparser.py" /> - <Compile Include="test\test_runpy.py" /> - <Compile Include="test\test_sax.py" /> - <Compile Include="test\test_sched.py" /> - <Compile Include="test\test_scope.py" /> - <Compile Include="test\test_script_helper.py" /> - <Compile Include="test\test_secrets.py" /> - <Compile Include="test\test_select.py" /> - <Compile Include="test\test_selectors.py" /> - <Compile Include="test\test_set.py" /> - <Compile Include="test\test_setcomps.py" /> - <Compile Include="test\test_shelve.py" /> - <Compile Include="test\test_shlex.py" /> - <Compile Include="test\test_shutil.py" /> - <Compile Include="test\test_signal.py" /> - <Compile Include="test\test_site.py" /> - <Compile Include="test\test_slice.py" /> - <Compile Include="test\test_smtplib.py" /> - <Compile Include="test\test_smtpnet.py" /> - <Compile Include="test\test_sndhdr.py" /> - <Compile Include="test\test_socket.py" /> - <Compile Include="test\test_socketserver.py" /> - <Compile Include="test\test_sort.py" /> - <Compile Include="test\test_source_encoding.py" /> - <Compile Include="test\test_spwd.py" /> - <Compile Include="test\test_sqlite3" /> - <Compile Include="test\test_sqlite3\__init__.py" /> - <Compile Include="test\test_sqlite3\test_backup.py" /> - <Compile Include="test\test_sqlite3\test_dbapi.py" /> - <Compile Include="test\test_sqlite3\test_dump.py" /> - <Compile Include="test\test_sqlite3\test_factory.py" /> - <Compile Include="test\test_sqlite3\test_hooks.py" /> - <Compile Include="test\test_sqlite3\test_regression.py" /> - <Compile Include="test\test_sqlite3\test_transactions.py" /> - <Compile Include="test\test_sqlite3\test_types.py" /> - <Compile Include="test\test_sqlite3\test_userfunctions.py" /> - <Compile Include="test\test_ssl.py" /> - <Compile Include="test\test_startfile.py" /> - <Compile Include="test\test_stat.py" /> - <Compile Include="test\test_statistics.py" /> - <Compile Include="test\test_strftime.py" /> - <Compile Include="test\test_string.py" /> - <Compile Include="test\test_stringprep.py" /> - 
<Compile Include="test\test_string_literals.py" /> - <Compile Include="test\test_strptime.py" /> - <Compile Include="test\test_strtod.py" /> - <Compile Include="test\test_struct.py" /> - <Compile Include="test\test_structmembers.py" /> - <Compile Include="test\test_structseq.py" /> - <Compile Include="test\test_subclassinit.py" /> - <Compile Include="test\test_subprocess.py" /> - <Compile Include="test\test_sunau.py" /> - <Compile Include="test\test_sundry.py" /> - <Compile Include="test\test_super.py" /> - <Compile Include="test\test_support.py" /> - <Compile Include="test\test_symbol.py" /> - <Compile Include="test\test_symtable.py" /> - <Compile Include="test\test_syntax.py" /> - <Compile Include="test\test_sys.py" /> - <Compile Include="test\test_sysconfig.py" /> - <Compile Include="test\test_syslog.py" /> - <Compile Include="test\test_sys_setprofile.py" /> - <Compile Include="test\test_sys_settrace.py" /> - <Compile Include="test\test_tarfile.py" /> - <Compile Include="test\test_tcl.py" /> - <Compile Include="test\test_telnetlib.py" /> - <Compile Include="test\test_tempfile.py" /> - <Compile Include="test\test_textwrap.py" /> - <Compile Include="test\test_thread.py" /> - <Compile Include="test\test_threadedtempfile.py" /> - <Compile Include="test\test_threaded_import.py" /> - <Compile Include="test\test_threading.py" /> - <Compile Include="test\test_threading_local.py" /> - <Compile Include="test\test_threadsignals.py" /> - <Compile Include="test\test_time.py" /> - <Compile Include="test\test_timeit.py" /> - <Compile Include="test\test_timeout.py" /> - <Compile Include="test\test_tix.py" /> - <Compile Include="test\test_tkinter\__init__.py" /> - <Compile Include="test\test_tkinter\support.py" /> - <Compile Include="test\test_tkinter\test_font.py" /> - <Compile Include="test\test_tkinter\test_geometry_managers.py" /> - <Compile Include="test\test_tkinter\test_images.py" /> - <Compile Include="test\test_tkinter\test_loadtk.py" /> - <Compile Include="test\test_tkinter\test_misc.py" /> - <Compile Include="test\test_tkinter\test_text.py" /> - <Compile Include="test\test_tkinter\test_variables.py" /> - <Compile Include="test\test_tkinter\test_widgets.py" /> - <Compile Include="test\test_tkinter\widget_tests.py" /> - <Compile Include="test\test_tokenize.py" /> - <Compile Include="test\test_tools\test_i18n.py" /> - <Compile Include="test\test_tools\test_reindent.py" /> - <Compile Include="test\test_tools\test_sundry.py" /> - <Compile Include="test\test_tools\test_unparse.py" /> - <Compile Include="test\test_tools\__init__.py" /> - <Compile Include="test\test_tools\__main__.py" /> - <Compile Include="test\test_trace.py" /> - <Compile Include="test\test_traceback.py" /> - <Compile Include="test\test_tracemalloc.py" /> - <Compile Include="test\test_ttk_textonly.py" /> - <Compile Include="test\test_ttk\__init__.py" /> - <Compile Include="test\test_ttk\test_extensions.py" /> - <Compile Include="test\test_ttk\test_style.py" /> - <Compile Include="test\test_ttk\test_widgets.py" /> - <Compile Include="test\test_tuple.py" /> - <Compile Include="test\test_turtle.py" /> - <Compile Include="test\test_typechecks.py" /> - <Compile Include="test\test_types.py" /> - <Compile Include="test\test_typing.py" /> - <Compile Include="test\test_ucn.py" /> - <Compile Include="test\test_unary.py" /> - <Compile Include="test\test_unicode.py" /> - <Compile Include="test\test_unicodedata.py" /> - <Compile Include="test\test_unicode_file.py" /> - <Compile Include="test\test_unicode_file_functions.py" /> - <Compile 
Include="test\test_unicode_identifiers.py" /> - <Compile Include="test\test_unittest\dummy.py" /> - <Compile Include="test\test_unittest\support.py" /> - <Compile Include="test\test_unittest\testmock\support.py" /> - <Compile Include="test\test_unittest\testmock\testcallable.py" /> - <Compile Include="test\test_unittest\testmock\testhelpers.py" /> - <Compile Include="test\test_unittest\testmock\testmagicmethods.py" /> - <Compile Include="test\test_unittest\testmock\testmock.py" /> - <Compile Include="test\test_unittest\testmock\testpatch.py" /> - <Compile Include="test\test_unittest\testmock\testsentinel.py" /> - <Compile Include="test\test_unittest\testmock\testwith.py" /> - <Compile Include="test\test_unittest\testmock\__init__.py" /> - <Compile Include="test\test_unittest\testmock\__main__.py" /> - <Compile Include="test\test_unittest\test_assertions.py" /> - <Compile Include="test\test_unittest\test_break.py" /> - <Compile Include="test\test_unittest\test_case.py" /> - <Compile Include="test\test_unittest\test_discovery.py" /> - <Compile Include="test\test_unittest\test_functiontestcase.py" /> - <Compile Include="test\test_unittest\test_loader.py" /> - <Compile Include="test\test_unittest\test_program.py" /> - <Compile Include="test\test_unittest\test_result.py" /> - <Compile Include="test\test_unittest\test_runner.py" /> - <Compile Include="test\test_unittest\test_setups.py" /> - <Compile Include="test\test_unittest\test_skipping.py" /> - <Compile Include="test\test_unittest\test_suite.py" /> - <Compile Include="test\test_unittest\_test_warnings.py" /> - <Compile Include="test\test_unittest\__init__.py" /> - <Compile Include="test\test_unittest\__main__.py" /> - <Compile Include="test\test_univnewlines.py" /> - <Compile Include="test\test_unpack.py" /> - <Compile Include="test\test_unpack_ex.py" /> - <Compile Include="test\test_urllib.py" /> - <Compile Include="test\test_urllib2.py" /> - <Compile Include="test\test_urllib2net.py" /> - <Compile Include="test\test_urllib2_localnet.py" /> - <Compile Include="test\test_urllibnet.py" /> - <Compile Include="test\test_urllib_response.py" /> - <Compile Include="test\test_urlparse.py" /> - <Compile Include="test\test_userdict.py" /> - <Compile Include="test\test_userlist.py" /> - <Compile Include="test\test_userstring.py" /> - <Compile Include="test\test_utf8source.py" /> - <Compile Include="test\test_uu.py" /> - <Compile Include="test\test_uuid.py" /> - <Compile Include="test\test_venv.py" /> - <Compile Include="test\test_wait3.py" /> - <Compile Include="test\test_wait4.py" /> - <Compile Include="test\test_warnings\data\import_warning.py" /> - <Compile Include="test\test_warnings\data\stacklevel.py" /> - <Compile Include="test\test_warnings\__init__.py" /> - <Compile Include="test\test_warnings\__main__.py" /> - <Compile Include="test\test_wave.py" /> - <Compile Include="test\test_weakref.py" /> - <Compile Include="test\test_weakset.py" /> - <Compile Include="test\test_webbrowser.py" /> - <Compile Include="test\test_winconsoleio.py" /> - <Compile Include="test\test_winreg.py" /> - <Compile Include="test\test_winsound.py" /> - <Compile Include="test\test_with.py" /> - <Compile Include="test\test_wsgiref.py" /> - <Compile Include="test\test_xdrlib.py" /> - <Compile Include="test\test_xmlrpc.py" /> - <Compile Include="test\test_xmlrpc_net.py" /> - <Compile Include="test\test_xml_dom_minicompat.py" /> - <Compile Include="test\test_xml_etree.py" /> - <Compile Include="test\test_xml_etree_c.py" /> - <Compile Include="test\test_yield_from.py" /> - 
<Compile Include="test\test_zipapp.py" /> - <Compile Include="test\test_zipfile.py" /> - <Compile Include="test\test_zipfile64.py" /> - <Compile Include="test\test_zipimport.py" /> - <Compile Include="test\test_zipimport_support.py" /> - <Compile Include="test\test_zlib.py" /> - <Compile Include="test\test_zoneinfo\__init__.py" /> - <Compile Include="test\test_zoneinfo\__main__.py" /> - <Compile Include="test\test_zoneinfo\_support.py" /> - <Compile Include="test\test_zoneinfo\test_zoneinfo.py" /> - <Compile Include="test\test__locale.py" /> - <Compile Include="test\test__opcode.py" /> - <Compile Include="test\test__osx_support.py" /> - <Compile Include="test\test___all__.py" /> - <Compile Include="test\test___future__.py" /> - <Compile Include="test\tf_inherit_check.py" /> - <Compile Include="test\threaded_import_hangers.py" /> - <Compile Include="test\time_hashlib.py" /> - <Compile Include="test\tracedmodules\testmod.py" /> - <Compile Include="test\tracedmodules\__init__.py" /> - <Compile Include="test\win_console_handler.py" /> - <Compile Include="test\xmltests.py" /> - <Compile Include="test\_test_multiprocessing.py" /> - <Compile Include="test\__init__.py" /> - <Compile Include="test\__main__.py" /> - <Compile Include="textwrap.py" /> - <Compile Include="this.py" /> - <Compile Include="threading.py" /> - <Compile Include="timeit.py" /> - <Compile Include="tkinter\colorchooser.py" /> - <Compile Include="tkinter\commondialog.py" /> - <Compile Include="tkinter\constants.py" /> - <Compile Include="tkinter\dialog.py" /> - <Compile Include="tkinter\dnd.py" /> - <Compile Include="tkinter\filedialog.py" /> - <Compile Include="tkinter\font.py" /> - <Compile Include="tkinter\messagebox.py" /> - <Compile Include="tkinter\scrolledtext.py" /> - <Compile Include="tkinter\simpledialog.py" /> - <Compile Include="tkinter\tix.py" /> - <Compile Include="tkinter\ttk.py" /> - <Compile Include="tkinter\__init__.py" /> - <Compile Include="tkinter\__main__.py" /> - <Compile Include="token.py" /> - <Compile Include="tokenize.py" /> - <Compile Include="trace.py" /> - <Compile Include="traceback.py" /> - <Compile Include="tracemalloc.py" /> - <Compile Include="tty.py" /> - <Compile Include="turtle.py" /> - <Compile Include="turtledemo\bytedesign.py" /> - <Compile Include="turtledemo\chaos.py" /> - <Compile Include="turtledemo\clock.py" /> - <Compile Include="turtledemo\colormixer.py" /> - <Compile Include="turtledemo\forest.py" /> - <Compile Include="turtledemo\fractalcurves.py" /> - <Compile Include="turtledemo\lindenmayer.py" /> - <Compile Include="turtledemo\minimal_hanoi.py" /> - <Compile Include="turtledemo\nim.py" /> - <Compile Include="turtledemo\paint.py" /> - <Compile Include="turtledemo\peace.py" /> - <Compile Include="turtledemo\penrose.py" /> - <Compile Include="turtledemo\planet_and_moon.py" /> - <Compile Include="turtledemo\round_dance.py" /> - <Compile Include="turtledemo\sorting_animate.py" /> - <Compile Include="turtledemo\tree.py" /> - <Compile Include="turtledemo\two_canvases.py" /> - <Compile Include="turtledemo\wikipedia.py" /> - <Compile Include="turtledemo\yinyang.py" /> - <Compile Include="turtledemo\__init__.py" /> - <Compile Include="turtledemo\__main__.py" /> - <Compile Include="types.py" /> - <Compile Include="typing.py" /> - <Compile Include="unittest\case.py" /> - <Compile Include="unittest\loader.py" /> - <Compile Include="unittest\main.py" /> - <Compile Include="unittest\mock.py" /> - <Compile Include="unittest\result.py" /> - <Compile Include="unittest\runner.py" /> - <Compile 
Include="unittest\signals.py" /> - <Compile Include="unittest\suite.py" /> - <Compile Include="unittest\util.py" /> - <Compile Include="unittest\__init__.py" /> - <Compile Include="unittest\__main__.py" /> - <Compile Include="urllib\error.py" /> - <Compile Include="urllib\parse.py" /> - <Compile Include="urllib\request.py" /> - <Compile Include="urllib\response.py" /> - <Compile Include="urllib\robotparser.py" /> - <Compile Include="urllib\__init__.py" /> - <Compile Include="uu.py" /> - <Compile Include="uuid.py" /> - <Compile Include="venv\__init__.py" /> - <Compile Include="venv\__main__.py" /> - <Compile Include="warnings.py" /> - <Compile Include="wave.py" /> - <Compile Include="weakref.py" /> - <Compile Include="webbrowser.py" /> - <Compile Include="wsgiref\handlers.py" /> - <Compile Include="wsgiref\headers.py" /> - <Compile Include="wsgiref\simple_server.py" /> - <Compile Include="wsgiref\util.py" /> - <Compile Include="wsgiref\validate.py" /> - <Compile Include="wsgiref\__init__.py" /> - <Compile Include="xdrlib.py" /> - <Compile Include="xmlrpc\client.py" /> - <Compile Include="xmlrpc\server.py" /> - <Compile Include="xmlrpc\__init__.py" /> - <Compile Include="xml\dom\domreg.py" /> - <Compile Include="xml\dom\expatbuilder.py" /> - <Compile Include="xml\dom\minicompat.py" /> - <Compile Include="xml\dom\minidom.py" /> - <Compile Include="xml\dom\NodeFilter.py" /> - <Compile Include="xml\dom\pulldom.py" /> - <Compile Include="xml\dom\xmlbuilder.py" /> - <Compile Include="xml\dom\__init__.py" /> - <Compile Include="xml\etree\cElementTree.py" /> - <Compile Include="xml\etree\ElementInclude.py" /> - <Compile Include="xml\etree\ElementPath.py" /> - <Compile Include="xml\etree\ElementTree.py" /> - <Compile Include="xml\etree\__init__.py" /> - <Compile Include="xml\parsers\expat.py" /> - <Compile Include="xml\parsers\__init__.py" /> - <Compile Include="xml\sax\expatreader.py" /> - <Compile Include="xml\sax\handler.py" /> - <Compile Include="xml\sax\saxutils.py" /> - <Compile Include="xml\sax\xmlreader.py" /> - <Compile Include="xml\sax\_exceptions.py" /> - <Compile Include="xml\sax\__init__.py" /> - <Compile Include="xml\__init__.py" /> - <Compile Include="zipapp.py" /> - <Compile Include="zipfile.py" /> - <Compile Include="zoneinfo\_common.py" /> - <Compile Include="zoneinfo\__init__.py" /> - <Compile Include="zoneinfo\_tzpath.py" /> - <Compile Include="zoneinfo\_zoneinfo.py" /> - <Compile Include="_collections_abc.py" /> - <Compile Include="_compat_pickle.py" /> - <Compile Include="_compression.py" /> - <Compile Include="_markupbase.py" /> - <Compile Include="_osx_support.py" /> - <Compile Include="_pydecimal.py" /> - <Compile Include="_pyio.py" /> - <Compile Include="_sitebuiltins.py" /> - <Compile Include="_strptime.py" /> - <Compile Include="_threading_local.py" /> - <Compile Include="_weakrefset.py" /> - <Compile Include="__future__.py" /> - <Compile Include="__phello__.foo.py" /> - </ItemGroup> - <ItemGroup> - <Content Include="idlelib\CREDITS.txt" /> - <Content Include="idlelib\extend.txt" /> - <Content Include="idlelib\help.html" /> - <Content Include="idlelib\HISTORY.txt" /> - <Content Include="idlelib\Icons\folder.gif" /> - <Content Include="idlelib\Icons\idle.ico" /> - <Content Include="idlelib\Icons\idle_16.gif" /> - <Content Include="idlelib\Icons\idle_16.png" /> - <Content Include="idlelib\Icons\idle_32.gif" /> - <Content Include="idlelib\Icons\idle_32.png" /> - <Content Include="idlelib\Icons\idle_48.gif" /> - <Content Include="idlelib\Icons\idle_48.png" /> - <Content 
Include="idlelib\Icons\idle_256.png" /> - <Content Include="idlelib\Icons\minusnode.gif" /> - <Content Include="idlelib\Icons\openfolder.gif" /> - <Content Include="idlelib\Icons\plusnode.gif" /> - <Content Include="idlelib\Icons\python.gif" /> - <Content Include="idlelib\Icons\tk.gif" /> - <Content Include="idlelib\idle_test\README.txt" /> - <Content Include="idlelib\NEWS.txt" /> - <Content Include="idlelib\NEWS2x.txt" /> - <Content Include="idlelib\README.txt" /> - <Content Include="idlelib\TODO.txt" /> - <Content Include="lib2to3\Grammar.txt" /> - <Content Include="lib2to3\PatternGrammar.txt" /> - <Content Include="pydoc_data\_pydoc.css" /> - <Content Include="site-packages\README.txt" /> - <Content Include="test\cjkencodings\big5-utf8.txt" /> - <Content Include="test\cjkencodings\big5.txt" /> - <Content Include="test\cjkencodings\big5hkscs-utf8.txt" /> - <Content Include="test\cjkencodings\big5hkscs.txt" /> - <Content Include="test\cjkencodings\cp949-utf8.txt" /> - <Content Include="test\cjkencodings\cp949.txt" /> - <Content Include="test\cjkencodings\euc_jisx0213-utf8.txt" /> - <Content Include="test\cjkencodings\euc_jisx0213.txt" /> - <Content Include="test\cjkencodings\euc_jp-utf8.txt" /> - <Content Include="test\cjkencodings\euc_jp.txt" /> - <Content Include="test\cjkencodings\euc_kr-utf8.txt" /> - <Content Include="test\cjkencodings\euc_kr.txt" /> - <Content Include="test\cjkencodings\gb18030-utf8.txt" /> - <Content Include="test\cjkencodings\gb18030.txt" /> - <Content Include="test\cjkencodings\gb2312-utf8.txt" /> - <Content Include="test\cjkencodings\gb2312.txt" /> - <Content Include="test\cjkencodings\gbk-utf8.txt" /> - <Content Include="test\cjkencodings\gbk.txt" /> - <Content Include="test\cjkencodings\hz-utf8.txt" /> - <Content Include="test\cjkencodings\hz.txt" /> - <Content Include="test\cjkencodings\iso2022_jp-utf8.txt" /> - <Content Include="test\cjkencodings\iso2022_jp.txt" /> - <Content Include="test\cjkencodings\iso2022_kr-utf8.txt" /> - <Content Include="test\cjkencodings\iso2022_kr.txt" /> - <Content Include="test\cjkencodings\johab-utf8.txt" /> - <Content Include="test\cjkencodings\johab.txt" /> - <Content Include="test\cjkencodings\shift_jis-utf8.txt" /> - <Content Include="test\cjkencodings\shift_jis.txt" /> - <Content Include="test\cjkencodings\shift_jisx0213-utf8.txt" /> - <Content Include="test\cjkencodings\shift_jisx0213.txt" /> - <Content Include="test\cmath_testcases.txt" /> - <Content Include="test\exception_hierarchy.txt" /> - <Content Include="test\floating_points.txt" /> - <Content Include="test\formatfloat_testcases.txt" /> - <Content Include="test\ieee754.txt" /> - <Content Include="test\imghdrdata\python.bmp" /> - <Content Include="test\imghdrdata\python.gif" /> - <Content Include="test\imghdrdata\python.jpg" /> - <Content Include="test\imghdrdata\python.png" /> - <Content Include="test\leakers\README.txt" /> - <Content Include="test\mailcap.txt" /> - <Content Include="test\math_testcases.txt" /> - <Content Include="test\sgml_input.html" /> - <Content Include="test\test_difflib_expect.html" /> - <Content Include="test\test_doctest.txt" /> - <Content Include="test\test_doctest2.txt" /> - <Content Include="test\test_doctest3.txt" /> - <Content Include="test\test_doctest4.txt" /> - <Content Include="test\test_email\data\msg_01.txt" /> - <Content Include="test\test_email\data\msg_02.txt" /> - <Content Include="test\test_email\data\msg_03.txt" /> - <Content Include="test\test_email\data\msg_04.txt" /> - <Content Include="test\test_email\data\msg_05.txt" 
/> - <Content Include="test\test_email\data\msg_06.txt" /> - <Content Include="test\test_email\data\msg_07.txt" /> - <Content Include="test\test_email\data\msg_08.txt" /> - <Content Include="test\test_email\data\msg_09.txt" /> - <Content Include="test\test_email\data\msg_10.txt" /> - <Content Include="test\test_email\data\msg_11.txt" /> - <Content Include="test\test_email\data\msg_12.txt" /> - <Content Include="test\test_email\data\msg_12a.txt" /> - <Content Include="test\test_email\data\msg_13.txt" /> - <Content Include="test\test_email\data\msg_14.txt" /> - <Content Include="test\test_email\data\msg_15.txt" /> - <Content Include="test\test_email\data\msg_16.txt" /> - <Content Include="test\test_email\data\msg_17.txt" /> - <Content Include="test\test_email\data\msg_18.txt" /> - <Content Include="test\test_email\data\msg_19.txt" /> - <Content Include="test\test_email\data\msg_20.txt" /> - <Content Include="test\test_email\data\msg_21.txt" /> - <Content Include="test\test_email\data\msg_22.txt" /> - <Content Include="test\test_email\data\msg_23.txt" /> - <Content Include="test\test_email\data\msg_24.txt" /> - <Content Include="test\test_email\data\msg_25.txt" /> - <Content Include="test\test_email\data\msg_26.txt" /> - <Content Include="test\test_email\data\msg_27.txt" /> - <Content Include="test\test_email\data\msg_28.txt" /> - <Content Include="test\test_email\data\msg_29.txt" /> - <Content Include="test\test_email\data\msg_30.txt" /> - <Content Include="test\test_email\data\msg_31.txt" /> - <Content Include="test\test_email\data\msg_32.txt" /> - <Content Include="test\test_email\data\msg_33.txt" /> - <Content Include="test\test_email\data\msg_34.txt" /> - <Content Include="test\test_email\data\msg_35.txt" /> - <Content Include="test\test_email\data\msg_36.txt" /> - <Content Include="test\test_email\data\msg_37.txt" /> - <Content Include="test\test_email\data\msg_38.txt" /> - <Content Include="test\test_email\data\msg_39.txt" /> - <Content Include="test\test_email\data\msg_40.txt" /> - <Content Include="test\test_email\data\msg_41.txt" /> - <Content Include="test\test_email\data\msg_42.txt" /> - <Content Include="test\test_email\data\msg_43.txt" /> - <Content Include="test\test_email\data\msg_44.txt" /> - <Content Include="test\test_email\data\msg_45.txt" /> - <Content Include="test\test_email\data\msg_46.txt" /> - <Content Include="test\test_email\data\PyBanner048.gif" /> - <Content Include="test\tokenize_tests-latin1-coding-cookie-and-utf8-bom-sig.txt" /> - <Content Include="test\tokenize_tests-no-coding-cookie-and-utf8-bom-sig-only.txt" /> - <Content Include="test\tokenize_tests-utf8-coding-cookie-and-no-utf8-bom-sig.txt" /> - <Content Include="test\tokenize_tests-utf8-coding-cookie-and-utf8-bom-sig.txt" /> - <Content Include="test\tokenize_tests.txt" /> - </ItemGroup> - <ItemGroup> - <Folder Include="asyncio" /> - <Folder Include="collections" /> - <Folder Include="concurrent" /> - <Folder Include="concurrent\futures" /> - <Folder Include="ctypes" /> - <Folder Include="ctypes\macholib" /> - <Folder Include="curses" /> - <Folder Include="dbm" /> - <Folder Include="distutils" /> - <Folder Include="distutils\command" /> - <Folder Include="distutils\tests" /> - <Folder Include="email" /> - <Folder Include="email\mime" /> - <Folder Include="encodings" /> - <Folder Include="ensurepip" /> - <Folder Include="html" /> - <Folder Include="http" /> - <Folder Include="idlelib" /> - <Folder Include="idlelib\Icons" /> - <Folder Include="idlelib\idle_test" /> - <Folder Include="importlib" /> - 
<Folder Include="json" /> - <Folder Include="lib2to3" /> - <Folder Include="lib2to3\fixes" /> - <Folder Include="lib2to3\pgen2" /> - <Folder Include="logging" /> - <Folder Include="msilib" /> - <Folder Include="multiprocessing" /> - <Folder Include="multiprocessing\dummy" /> - <Folder Include="pydoc_data" /> - <Folder Include="site-packages" /> - <Folder Include="sqlite3" /> - <Folder Include="sqlite3\test" /> - <Folder Include="test" /> - <Folder Include="test\cjkencodings" /> - <Folder Include="test\crashers" /> - <Folder Include="test\dtracedata" /> - <Folder Include="test\eintrdata" /> - <Folder Include="test\encoded_modules" /> - <Folder Include="test\imghdrdata" /> - <Folder Include="test\leakers" /> - <Folder Include="test\libregrtest" /> - <Folder Include="test\subprocessdata" /> - <Folder Include="test\support" /> - <Folder Include="test\test_asyncio" /> - <Folder Include="test\test_ctypes" /> - <Folder Include="test\test_email" /> - <Folder Include="test\test_email\data" /> - <Folder Include="test\test_import" /> - <Folder Include="test\test_importlib" /> - <Folder Include="test\test_importlib\builtin" /> - <Folder Include="test\test_importlib\extension" /> - <Folder Include="test\test_importlib\frozen" /> - <Folder Include="test\test_importlib\import_" /> - <Folder Include="test\test_importlib\namespace_pkgs\" /> - <Folder Include="test\test_importlib\namespace_pkgs\both_portions\" /> - <Folder Include="test\test_importlib\namespace_pkgs\both_portions\foo" /> - <Folder Include="test\test_importlib\namespace_pkgs\module_and_namespace_package" /> - <Folder Include="test\test_importlib\namespace_pkgs\not_a_namespace_pkg\" /> - <Folder Include="test\test_importlib\namespace_pkgs\not_a_namespace_pkg\foo" /> - <Folder Include="test\test_importlib\namespace_pkgs\portion1\" /> - <Folder Include="test\test_importlib\namespace_pkgs\portion1\foo" /> - <Folder Include="test\test_importlib\namespace_pkgs\portion2\" /> - <Folder Include="test\test_importlib\namespace_pkgs\portion2\foo" /> - <Folder Include="test\test_importlib\namespace_pkgs\project1\" /> - <Folder Include="test\test_importlib\namespace_pkgs\project1\parent\" /> - <Folder Include="test\test_importlib\namespace_pkgs\project1\parent\child" /> - <Folder Include="test\test_importlib\namespace_pkgs\project2\" /> - <Folder Include="test\test_importlib\namespace_pkgs\project2\parent\" /> - <Folder Include="test\test_importlib\namespace_pkgs\project2\parent\child" /> - <Folder Include="test\test_importlib\namespace_pkgs\project3\" /> - <Folder Include="test\test_importlib\namespace_pkgs\project3\parent\" /> - <Folder Include="test\test_importlib\namespace_pkgs\project3\parent\child" /> - <Folder Include="test\test_importlib\source" /> - <Folder Include="test\test_import\data\" /> - <Folder Include="test\test_import\data\circular_imports" /> - <Folder Include="test\test_import\data\circular_imports\subpkg" /> - <Folder Include="test\test_import\data\package" /> - <Folder Include="test\test_import\data\package2" /> - <Folder Include="test\test_json" /> - <Folder Include="test\test_lib2to3" /> - <Folder Include="test\test_lib2to3\data" /> - <Folder Include="test\test_lib2to3\data\fixers" /> - <Folder Include="test\test_lib2to3\data\fixers\myfixes" /> - <Folder Include="test\test_peg_generator" /> - <Folder Include="test\test_tkinter" /> - <Folder Include="test\test_tools" /> - <Folder Include="test\test_ttk" /> - <Folder Include="test\test_unittest" /> - <Folder Include="test\test_unittest\testmock" /> - <Folder 
Include="test\test_warnings" /> - <Folder Include="test\test_warnings\data" /> - <Folder Include="test\tracedmodules" /> - <Folder Include="tkinter" /> - <Folder Include="turtledemo" /> - <Folder Include="unittest" /> - <Folder Include="urllib" /> - <Folder Include="venv" /> - <Folder Include="wsgiref" /> - <Folder Include="xml" /> - <Folder Include="xmlrpc" /> - <Folder Include="xml\dom" /> - <Folder Include="xml\etree" /> - <Folder Include="xml\parsers" /> - <Folder Include="xml\sax" /> - </ItemGroup> - <Import Project="$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\Python Tools\Microsoft.PythonTools.targets" /> -</Project> diff --git a/PCbuild/pcbuild.proj b/PCbuild/pcbuild.proj index d9e4d981aa2189..e13a0d409293f4 100644 --- a/PCbuild/pcbuild.proj +++ b/PCbuild/pcbuild.proj @@ -77,7 +77,7 @@ <ExtensionModules Include="@(ExternalModules->'%(Identity)')" Condition="$(IncludeExternals)" /> <Projects Include="@(ExtensionModules->'%(Identity).vcxproj')" Condition="$(IncludeExtensions)" /> <!-- Test modules --> - <TestModules Include="_ctypes_test;_testbuffer;_testcapi;_testinternalcapi;_testembed;_testimportmultiple;_testmultiphase;_testconsole" /> + <TestModules Include="_ctypes_test;_testbuffer;_testcapi;_testinternalcapi;_testembed;_testimportmultiple;_testmultiphase;_testsinglephase;_testconsole" /> <TestModules Include="xxlimited" Condition="'$(Configuration)' == 'Release'" /> <TestModules Include="xxlimited_35" Condition="'$(Configuration)' == 'Release'" /> <Projects Include="@(TestModules->'%(Identity).vcxproj')" Condition="$(IncludeTests)"> diff --git a/PCbuild/pcbuild.sln b/PCbuild/pcbuild.sln index 9f374abd152b17..848d59504381cc 100644 --- a/PCbuild/pcbuild.sln +++ b/PCbuild/pcbuild.sln @@ -10,7 +10,42 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "python", "python.vcxproj", "{B11D750F-CD1F-4A96-85CE-E69A5C5259F9}" ProjectSection(ProjectDependencies) = postProject + {9E48B300-37D1-11DD-8C41-005056C00008} = {9E48B300-37D1-11DD-8C41-005056C00008} + {9EC7190A-249F-4180-A900-548FDCF3055F} = {9EC7190A-249F-4180-A900-548FDCF3055F} + {78D80A15-BD8C-44E2-B49E-1F05B0A0A687} = {78D80A15-BD8C-44E2-B49E-1F05B0A0A687} + {6901D91C-6E48-4BB7-9FEC-700C8131DF1D} = {6901D91C-6E48-4BB7-9FEC-700C8131DF1D} + {54B1431F-B86B-4ACB-B28C-88BCF93191D8} = {54B1431F-B86B-4ACB-B28C-88BCF93191D8} + {F749B822-B489-4CA5-A3AD-CE078F5F338A} = {F749B822-B489-4CA5-A3AD-CE078F5F338A} + {D06B6426-4762-44CC-8BAD-D79052507F2F} = {D06B6426-4762-44CC-8BAD-D79052507F2F} + {36D0C52C-DF4E-45D0-8BC7-E294C3ABC781} = {36D0C52C-DF4E-45D0-8BC7-E294C3ABC781} + {CB435430-EBB1-478B-8F4E-C256F6838F55} = {CB435430-EBB1-478B-8F4E-C256F6838F55} + {17E1E049-C309-4D79-843F-AE483C264AEA} = {17E1E049-C309-4D79-843F-AE483C264AEA} + {384C224A-7474-476E-A01B-750EA7DE918C} = {384C224A-7474-476E-A01B-750EA7DE918C} + {12728250-16EC-4DC6-94D7-E21DD88947F8} = {12728250-16EC-4DC6-94D7-E21DD88947F8} + {86937F53-C189-40EF-8CE8-8759D8E7D480} = {86937F53-C189-40EF-8CE8-8759D8E7D480} + {28B5D777-DDF2-4B6B-B34F-31D938813856} = {28B5D777-DDF2-4B6B-B34F-31D938813856} + {31FFC478-7B4A-43E8-9954-8D03E2187E9C} = {31FFC478-7B4A-43E8-9954-8D03E2187E9C} + {F9D71780-F393-11E0-BE50-0800200C9A66} = {F9D71780-F393-11E0-BE50-0800200C9A66} + {494BAC80-A60C-43A9-99E7-ACB691CE2C4D} = {494BAC80-A60C-43A9-99E7-ACB691CE2C4D} + {C6E20F84-3247-4AD6-B051-B073268F73BA} = {C6E20F84-3247-4AD6-B051-B073268F73BA} + {B244E787-C445-441C-BDF4-5A4F1A3A1E51} = 
{B244E787-C445-441C-BDF4-5A4F1A3A1E51} + {18CAE28C-B454-46C1-87A0-493D91D97F03} = {18CAE28C-B454-46C1-87A0-493D91D97F03} + {13CECB97-4119-4316-9D42-8534019A5A44} = {13CECB97-4119-4316-9D42-8534019A5A44} + {885D4898-D08D-4091-9C40-C700CFE3FC5A} = {885D4898-D08D-4091-9C40-C700CFE3FC5A} + {447F05A8-F581-4CAC-A466-5AC7936E207E} = {447F05A8-F581-4CAC-A466-5AC7936E207E} + {ECC7CEAC-A5E5-458E-BB9E-2413CC847881} = {ECC7CEAC-A5E5-458E-BB9E-2413CC847881} + {4946ECAC-2E69-4BF8-A90A-F5136F5094DF} = {4946ECAC-2E69-4BF8-A90A-F5136F5094DF} + {FDB84CBB-2FB6-47C8-A2D6-091E0833239D} = {FDB84CBB-2FB6-47C8-A2D6-091E0833239D} + {73FCD2BD-F133-46B7-8EC1-144CD82A59D5} = {73FCD2BD-F133-46B7-8EC1-144CD82A59D5} + {2097F1C1-597C-4167-93E3-656A7D6339B2} = {2097F1C1-597C-4167-93E3-656A7D6339B2} + {A2697BD3-28C1-4AEC-9106-8B748639FD16} = {A2697BD3-28C1-4AEC-9106-8B748639FD16} + {900342D7-516A-4469-B1AD-59A66E49A25F} = {900342D7-516A-4469-B1AD-59A66E49A25F} + {6DAC66D9-E703-4624-BE03-49112AB5AA62} = {6DAC66D9-E703-4624-BE03-49112AB5AA62} + {0E9791DB-593A-465F-98BC-681011311617} = {0E9791DB-593A-465F-98BC-681011311617} {0E9791DB-593A-465F-98BC-681011311618} = {0E9791DB-593A-465F-98BC-681011311618} + {EB6E69DD-04BF-4543-9B92-49FAABCEAC2E} = {EB6E69DD-04BF-4543-9B92-49FAABCEAC2E} + {16BFE6F0-22EF-40B5-B831-7E937119EF10} = {16BFE6F0-22EF-40B5-B831-7E937119EF10} + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742} = {FCBE1EF2-E0F0-40B1-88B5-00A35D378742} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pythoncore", "pythoncore.vcxproj", "{CF7AC3D1-E2DF-41D2-BEA6-1E2556CDEA26}" @@ -19,6 +54,9 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pythoncore", "pythoncore.vc EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pythonw", "pythonw.vcxproj", "{F4229CC3-873C-49AE-9729-DD308ED4CD4A}" + ProjectSection(ProjectDependencies) = postProject + {B11D750F-CD1F-4A96-85CE-E69A5C5259F9} = {B11D750F-CD1F-4A96-85CE-E69A5C5259F9} + EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "winsound", "winsound.vcxproj", "{28B5D777-DDF2-4B6B-B34F-31D938813856}" EndProject @@ -86,6 +124,8 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_testembed", "_testembed.vc EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_testmultiphase", "_testmultiphase.vcxproj", "{16BFE6F0-22EF-40B5-B831-7E937119EF10}" EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_testsinglephase", "_testsinglephase.vcxproj", "{2097F1C1-597C-4167-93E3-656A7D6339B2}" +EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pyshellext", "pyshellext.vcxproj", "{0F6EE4A4-C75F-4578-B4B3-2D64F4B9B782}" EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_testconsole", "_testconsole.vcxproj", "{B244E787-C445-441C-BDF4-5A4F1A3A1E51}" @@ -99,12 +139,18 @@ EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "liblzma", "liblzma.vcxproj", "{12728250-16EC-4DC6-94D7-E21DD88947F8}" EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "python_uwp", "python_uwp.vcxproj", "{9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}" + ProjectSection(ProjectDependencies) = postProject + {B11D750F-CD1F-4A96-85CE-E69A5C5259F9} = {B11D750F-CD1F-4A96-85CE-E69A5C5259F9} + EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "venvlauncher", "venvlauncher.vcxproj", "{494BAC80-A60C-43A9-99E7-ACB691CE2C4D}" EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "venvwlauncher", "venvwlauncher.vcxproj", 
"{FDB84CBB-2FB6-47C8-A2D6-091E0833239D}" EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pythonw_uwp", "pythonw_uwp.vcxproj", "{AB603547-1E2A-45B3-9E09-B04596006393}" + ProjectSection(ProjectDependencies) = postProject + {F4229CC3-873C-49AE-9729-DD308ED4CD4A} = {F4229CC3-873C-49AE-9729-DD308ED4CD4A} + EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_uuid", "_uuid.vcxproj", "{CB435430-EBB1-478B-8F4E-C256F6838F55}" EndProject @@ -1168,6 +1214,36 @@ Global {16BFE6F0-22EF-40B5-B831-7E937119EF10}.Release|Win32.Build.0 = Release|Win32 {16BFE6F0-22EF-40B5-B831-7E937119EF10}.Release|x64.ActiveCfg = Release|x64 {16BFE6F0-22EF-40B5-B831-7E937119EF10}.Release|x64.Build.0 = Release|x64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Debug|ARM.ActiveCfg = Debug|ARM + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Debug|ARM.Build.0 = Debug|ARM + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Debug|ARM64.ActiveCfg = Debug|ARM64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Debug|ARM64.Build.0 = Debug|ARM64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Debug|Win32.ActiveCfg = Debug|Win32 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Debug|Win32.Build.0 = Debug|Win32 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Debug|x64.ActiveCfg = Debug|x64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Debug|x64.Build.0 = Debug|x64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGInstrument|ARM.ActiveCfg = PGInstrument|ARM + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGInstrument|ARM.Build.0 = PGInstrument|ARM + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGInstrument|ARM64.ActiveCfg = PGInstrument|ARM64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGInstrument|ARM64.Build.0 = PGInstrument|ARM64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGInstrument|Win32.ActiveCfg = Release|Win32 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGInstrument|x64.ActiveCfg = Release|x64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGUpdate|ARM.ActiveCfg = PGUpdate|ARM + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGUpdate|ARM.Build.0 = PGUpdate|ARM + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGUpdate|ARM64.ActiveCfg = PGUpdate|ARM64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGUpdate|ARM64.Build.0 = PGUpdate|ARM64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGUpdate|Win32.ActiveCfg = Release|Win32 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGUpdate|Win32.Build.0 = Release|Win32 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGUpdate|x64.ActiveCfg = Release|x64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGUpdate|x64.Build.0 = Release|x64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Release|ARM.ActiveCfg = Release|ARM + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Release|ARM.Build.0 = Release|ARM + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Release|ARM64.ActiveCfg = Release|ARM64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Release|ARM64.Build.0 = Release|ARM64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Release|Win32.ActiveCfg = Release|Win32 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Release|Win32.Build.0 = Release|Win32 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Release|x64.ActiveCfg = Release|x64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Release|x64.Build.0 = Release|x64 {0F6EE4A4-C75F-4578-B4B3-2D64F4B9B782}.Debug|ARM.ActiveCfg = Debug|ARM {0F6EE4A4-C75F-4578-B4B3-2D64F4B9B782}.Debug|ARM.Build.0 = Debug|ARM {0F6EE4A4-C75F-4578-B4B3-2D64F4B9B782}.Debug|ARM64.ActiveCfg = Debug|ARM64 diff --git a/PCbuild/python.props b/PCbuild/python.props index 5fa32dfffd17da..320d41f4cc20d8 100644 --- a/PCbuild/python.props +++ b/PCbuild/python.props @@ -71,7 +71,7 @@ 
<opensslOutDir>$(ExternalsDir)openssl-bin-1.1.1q\$(ArchName)\</opensslOutDir> <opensslIncludeDir>$(opensslOutDir)include</opensslIncludeDir> <nasmDir>$(ExternalsDir)\nasm-2.11.06\</nasmDir> - <zlibDir>$(ExternalsDir)\zlib-1.2.12\</zlibDir> + <zlibDir>$(ExternalsDir)\zlib-1.2.13\</zlibDir> <!-- Suffix for all binaries when building for debug --> <PyDebugExt Condition="'$(PyDebugExt)' == '' and $(Configuration) == 'Debug'">_d</PyDebugExt> diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 111ad67f7da06a..bb2aaae3317b02 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -155,6 +155,7 @@ <ClInclude Include="..\Include\cpython\listobject.h" /> <ClInclude Include="..\Include\cpython\longintrepr.h" /> <ClInclude Include="..\Include\cpython\longobject.h" /> + <ClInclude Include="..\Include\cpython\memoryobject.h" /> <ClInclude Include="..\Include\cpython\methodobject.h" /> <ClInclude Include="..\Include\cpython\modsupport.h" /> <ClInclude Include="..\Include\cpython\object.h" /> @@ -203,13 +204,17 @@ <ClInclude Include="..\Include\internal\pycore_bytesobject.h" /> <ClInclude Include="..\Include\internal\pycore_call.h" /> <ClInclude Include="..\Include\internal\pycore_ceval.h" /> + <ClInclude Include="..\Include\internal\pycore_ceval_state.h" /> <ClInclude Include="..\Include\internal\pycore_code.h" /> <ClInclude Include="..\Include\internal\pycore_compile.h" /> <ClInclude Include="..\Include\internal\pycore_condvar.h" /> <ClInclude Include="..\Include\internal\pycore_context.h" /> <ClInclude Include="..\Include\internal\pycore_descrobject.h" /> + <ClInclude Include="..\Include\internal\pycore_dict.h" /> + <ClInclude Include="..\Include\internal\pycore_dict_state.h" /> <ClInclude Include="..\Include\internal\pycore_dtoa.h" /> <ClInclude Include="..\Include\internal\pycore_exceptions.h" /> + <ClInclude Include="..\Include\internal\pycore_faulthandler.h" /> <ClInclude Include="..\Include\internal\pycore_fileutils.h" /> <ClInclude Include="..\Include\internal\pycore_floatobject.h" /> <ClInclude Include="..\Include\internal\pycore_format.h" /> @@ -219,6 +224,7 @@ <ClInclude Include="..\Include\internal\pycore_getopt.h" /> <ClInclude Include="..\Include\internal\pycore_gil.h" /> <ClInclude Include="..\Include\internal\pycore_global_objects.h" /> + <ClInclude Include="..\Include\internal\pycore_global_objects_fini_generated.h" /> <ClInclude Include="..\Include\internal\pycore_hamt.h" /> <ClInclude Include="..\Include\internal\pycore_hashtable.h" /> <ClInclude Include="..\Include\internal\pycore_import.h" /> @@ -230,13 +236,17 @@ <ClInclude Include="..\Include\internal\pycore_moduleobject.h" /> <ClInclude Include="..\Include\internal\pycore_namespace.h" /> <ClInclude Include="..\Include\internal\pycore_object.h" /> + <ClInclude Include="..\Include\internal\pycore_obmalloc.h" /> + <ClInclude Include="..\Include\internal\pycore_obmalloc_init.h" /> <ClInclude Include="..\Include\internal\pycore_pathconfig.h" /> <ClInclude Include="..\Include\internal\pycore_pyarena.h" /> <ClInclude Include="..\Include\internal\pycore_pyerrors.h" /> <ClInclude Include="..\Include\internal\pycore_pyhash.h" /> <ClInclude Include="..\Include\internal\pycore_pylifecycle.h" /> <ClInclude Include="..\Include\internal\pycore_pymem.h" /> + <ClInclude Include="..\Include\internal\pycore_pymem_init.h" /> <ClInclude Include="..\Include\internal\pycore_pystate.h" /> + <ClInclude Include="..\Include\internal\pycore_pythread.h" /> <ClInclude Include="..\Include\internal\pycore_range.h" /> 
<ClInclude Include="..\Include\internal\pycore_runtime.h" /> <ClInclude Include="..\Include\internal\pycore_runtime_init.h" /> @@ -247,13 +257,16 @@ <ClInclude Include="..\Include\internal\pycore_structseq.h" /> <ClInclude Include="..\Include\internal\pycore_sysmodule.h" /> <ClInclude Include="..\Include\internal\pycore_symtable.h" /> + <ClInclude Include="..\Include\internal\pycore_time.h" /> <ClInclude Include="..\Include\internal\pycore_token.h" /> <ClInclude Include="..\Include\internal\pycore_traceback.h" /> + <ClInclude Include="..\Include\internal\pycore_tracemalloc.h" /> <ClInclude Include="..\Include\internal\pycore_tuple.h" /> <ClInclude Include="..\Include\internal\pycore_typeobject.h" /> <ClInclude Include="..\Include\internal\pycore_ucnhash.h" /> <ClInclude Include="..\Include\internal\pycore_unionobject.h" /> <ClInclude Include="..\Include\internal\pycore_unicodeobject.h" /> + <ClInclude Include="..\Include\internal\pycore_unicodeobject_generated.h" /> <ClInclude Include="..\Include\internal\pycore_warnings.h" /> <ClInclude Include="..\Include\intrcheck.h" /> <ClInclude Include="..\Include\iterobject.h" /> @@ -588,7 +601,7 @@ </PropertyGroup> <Message Text="Building $(GitTag):$(GitVersion) $(GitBranch)" Importance="high" /> <ItemGroup> - <ClCompile Condition="$(Filename) == 'getbuildinfo'"> + <ClCompile Condition="%(Filename) == 'getbuildinfo'"> <PreprocessorDefinitions>GITVERSION="$(GitVersion)";GITTAG="$(GitTag)";GITBRANCH="$(GitBranch)";%(PreprocessorDefinitions)</PreprocessorDefinitions> </ClCompile> </ItemGroup> diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index ab7d01974b006c..339e7cc4937a3d 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -384,6 +384,9 @@ <ClInclude Include="..\Include\cpython\longobject.h"> <Filter>Include</Filter> </ClInclude> + <ClInclude Include="..\Include\cpython\memoryobject.h"> + <Filter>Include</Filter> + </ClInclude> <ClInclude Include="..\Include\cpython\odictobject.h"> <Filter>Include</Filter> </ClInclude> @@ -477,6 +480,9 @@ <ClInclude Include="..\Include\internal\pycore_unicodeobject.h"> <Filter>Include\internal</Filter> </ClInclude> + <ClInclude Include="..\Include\internal\pycore_unicodeobject_generated.h"> + <Filter>Include\internal</Filter> + </ClInclude> <ClInclude Include="..\Include\internal\pycore_warnings.h"> <Filter>Include\internal</Filter> </ClInclude> @@ -513,6 +519,9 @@ <ClInclude Include="..\Include\internal\pycore_ceval.h"> <Filter>Include\internal</Filter> </ClInclude> + <ClInclude Include="..\Include\internal\pycore_ceval_state.h"> + <Filter>Include\internal</Filter> + </ClInclude> <ClInclude Include="..\Include\internal\pycore_code.h"> <Filter>Include\internal</Filter> </ClInclude> @@ -528,12 +537,21 @@ <ClInclude Include="..\Include\internal\pycore_descrobject.h"> <Filter>Include\internal</Filter> </ClInclude> + <ClInclude Include="..\Include\internal\pycore_dict.h"> + <Filter>Include\internal</Filter> + </ClInclude> + <ClInclude Include="..\Include\internal\pycore_dict_state.h"> + <Filter>Include\internal</Filter> + </ClInclude> <ClInclude Include="..\Include\internal\pycore_dtoa.h"> <Filter>Include\internal</Filter> </ClInclude> <ClInclude Include="..\Include\internal\pycore_exceptions.h"> <Filter>Include\internal</Filter> </ClInclude> + <ClInclude Include="..\Include\internal\pycore_faulthandler.h"> + <Filter>Include\internal</Filter> + </ClInclude> <ClInclude Include="..\Include\internal\pycore_fileutils.h"> 
<Filter>Include\internal</Filter> </ClInclude> @@ -543,6 +561,9 @@ <ClInclude Include="..\Include\internal\pycore_format.h"> <Filter>Include\internal</Filter> </ClInclude> + <ClInclude Include="..\Include\internal\pycore_function.h"> + <Filter>Include\internal</Filter> + </ClInclude> <ClInclude Include="..\Include\internal\pycore_gc.h"> <Filter>Include\internal</Filter> </ClInclude> @@ -558,6 +579,9 @@ <ClInclude Include="..\Include\internal\pycore_global_objects.h"> <Filter>Include\internal</Filter> </ClInclude> + <ClInclude Include="..\Include\internal\pycore_global_objects_fini_generated.h"> + <Filter>Include\internal</Filter> + </ClInclude> <ClInclude Include="..\Include\internal\pycore_hamt.h"> <Filter>Include\internal</Filter> </ClInclude> @@ -591,6 +615,12 @@ <ClInclude Include="..\Include\internal\pycore_object.h"> <Filter>Include\internal</Filter> </ClInclude> + <ClInclude Include="..\Include\internal\pycore_obmalloc.h"> + <Filter>Include\internal</Filter> + </ClInclude> + <ClInclude Include="..\Include\internal\pycore_obmalloc_init.h"> + <Filter>Include\internal</Filter> + </ClInclude> <ClInclude Include="..\Include\internal\pycore_pathconfig.h"> <Filter>Include\internal</Filter> </ClInclude> @@ -609,9 +639,15 @@ <ClInclude Include="..\Include\internal\pycore_pymem.h"> <Filter>Include\internal</Filter> </ClInclude> + <ClInclude Include="..\Include\internal\pycore_pymem_init.h"> + <Filter>Include\internal</Filter> + </ClInclude> <ClInclude Include="..\Include\internal\pycore_pystate.h"> <Filter>Include\internal</Filter> </ClInclude> + <ClInclude Include="..\Include\internal\pycore_pythread.h"> + <Filter>Include\internal</Filter> + </ClInclude> <ClInclude Include="..\Include\internal\pycore_range.h"> <Filter>Include\internal</Filter> </ClInclude> @@ -639,12 +675,18 @@ <ClInclude Include="..\Include\internal\pycore_symtable.h"> <Filter>Include\internal</Filter> </ClInclude> + <ClInclude Include="..\Include\internal\pycore_time.h"> + <Filter>Include\internal</Filter> + </ClInclude> <ClInclude Include="..\Include\internal\pycore_token.h"> <Filter>Include\internal</Filter> </ClInclude> <ClInclude Include="..\Include\internal\pycore_traceback.h"> <Filter>Include\internal</Filter> </ClInclude> + <ClInclude Include="..\Include\internal\pycore_tracemalloc.h"> + <Filter>Include\internal</Filter> + </ClInclude> <ClInclude Include="..\Include\internal\pycore_tuple.h"> <Filter>Include\internal</Filter> </ClInclude> diff --git a/PCbuild/readme.txt b/PCbuild/readme.txt index 5ba3e3940627b2..3ed26a47b066b9 100644 --- a/PCbuild/readme.txt +++ b/PCbuild/readme.txt @@ -1,7 +1,7 @@ Quick Start Guide ----------------- -1. Install Microsoft Visual Studio 2017 with Python workload and +1. Install Microsoft Visual Studio 2017 or later with Python workload and Python native development component. 1a. Optionally install Python 3.6 or later. If not installed, get_externals.bat (via build.bat) will download and use Python via @@ -147,6 +147,7 @@ _testcapi _testconsole _testimportmultiple _testmultiphase +_testsinglephase _tkinter pyexpat select diff --git a/Parser/action_helpers.c b/Parser/action_helpers.c index d1be679aff2e7b..f12dad095acaa8 100644 --- a/Parser/action_helpers.c +++ b/Parser/action_helpers.c @@ -12,6 +12,8 @@ _create_dummy_identifier(Parser *p) void * _PyPegen_dummy_name(Parser *p, ...) { + // XXX This leaks memory from the initial arena. + // Use a statically allocated variable instead of a pointer? 
static void *cache = NULL; if (cache != NULL) { @@ -1286,4 +1288,4 @@ _PyPegen_nonparen_genexp_in_call(Parser *p, expr_ty args, asdl_comprehension_seq _PyPegen_get_last_comprehension_item(last_comprehension), "Generator expression must be parenthesized" ); -} \ No newline at end of file +} diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py index 6bd2e66c804d1f..3e307610b635f4 100755 --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -675,8 +675,7 @@ def visitField(self, field, name, sum=None, prod=None, depth=0): self.emit("if (%s == NULL) goto failed;" % field.name, depth+1) self.emit("for (i = 0; i < len; i++) {", depth+1) self.emit("%s val;" % ctype, depth+2) - self.emit("PyObject *tmp2 = PyList_GET_ITEM(tmp, i);", depth+2) - self.emit("Py_INCREF(tmp2);", depth+2) + self.emit("PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i));", depth+2) with self.recursive_call(name, depth+2): self.emit("res = obj2ast_%s(state, tmp2, &val, arena);" % field.type, depth+2, reflow=False) @@ -995,10 +994,11 @@ def visitModule(self, mod): static PyObject* ast2obj_object(struct ast_state *Py_UNUSED(state), void *o) { - if (!o) - o = Py_None; - Py_INCREF((PyObject*)o); - return (PyObject*)o; + PyObject *op = (PyObject*)o; + if (!op) { + op = Py_None; + } + return Py_NewRef(op); } #define ast2obj_constant ast2obj_object #define ast2obj_identifier ast2obj_object @@ -1020,9 +1020,11 @@ def visitModule(self, mod): *out = NULL; return -1; } - Py_INCREF(obj); + *out = Py_NewRef(obj); + } + else { + *out = NULL; } - *out = obj; return 0; } @@ -1032,8 +1034,7 @@ def visitModule(self, mod): *out = NULL; return -1; } - Py_INCREF(obj); - *out = obj; + *out = Py_NewRef(obj); return 0; } @@ -1301,8 +1302,7 @@ def simpleSum(self, sum, name): self.emit("switch(o) {", 1) for t in sum.types: self.emit("case %s:" % t.name, 2) - self.emit("Py_INCREF(state->%s_singleton);" % t.name, 3) - self.emit("return state->%s_singleton;" % t.name, 3) + self.emit("return Py_NewRef(state->%s_singleton);" % t.name, 3) self.emit("}", 1) self.emit("Py_UNREACHABLE();", 1); self.emit("}", 0) @@ -1484,6 +1484,10 @@ def generate_ast_fini(module_state, f): for s in module_state: f.write(" Py_CLEAR(state->" + s + ');\n') f.write(textwrap.dedent(""" + if (_PyInterpreterState_Get() == _PyInterpreterState_Main()) { + Py_CLEAR(_Py_CACHED_OBJECT(str_replace_inf)); + } + #if !defined(NDEBUG) state->initialized = -1; #else diff --git a/Parser/parser.c b/Parser/parser.c index bd3204af3e9388..d0bc25927bece6 100644 --- a/Parser/parser.c +++ b/Parser/parser.c @@ -17,28 +17,28 @@ static KeywordToken *reserved_keywords[] = { (KeywordToken[]) {{NULL, -1}}, (KeywordToken[]) {{NULL, -1}}, (KeywordToken[]) { - {"if", 634}, - {"as", 632}, - {"in", 643}, + {"if", 641}, + {"as", 639}, + {"in", 650}, {"or", 574}, {"is", 582}, {NULL, -1}, }, (KeywordToken[]) { {"del", 603}, - {"def", 644}, - {"for", 642}, - {"try", 618}, + {"def", 651}, + {"for", 649}, + {"try", 623}, {"and", 575}, {"not", 581}, {NULL, -1}, }, (KeywordToken[]) { - {"from", 572}, + {"from", 607}, {"pass", 504}, - {"with", 612}, - {"elif", 636}, - {"else", 637}, + {"with", 614}, + {"elif", 643}, + {"else", 644}, {"None", 601}, {"True", 600}, {NULL, -1}, @@ -47,22 +47,22 @@ static KeywordToken *reserved_keywords[] = { {"raise", 522}, {"yield", 573}, {"break", 508}, - {"class", 646}, - {"while", 639}, + {"class", 653}, + {"while", 646}, {"False", 602}, {NULL, -1}, }, (KeywordToken[]) { {"return", 519}, - {"import", 531}, + {"import", 606}, {"assert", 526}, {"global", 523}, - {"except", 629}, + {"except", 636}, 
{"lambda", 586}, {NULL, -1}, }, (KeywordToken[]) { - {"finally", 625}, + {"finally", 632}, {NULL, -1}, }, (KeywordToken[]) { @@ -284,277 +284,281 @@ static char *soft_keywords[] = { #define invalid_with_item_type 1204 #define invalid_for_target_type 1205 #define invalid_group_type 1206 -#define invalid_import_from_targets_type 1207 -#define invalid_with_stmt_type 1208 -#define invalid_with_stmt_indent_type 1209 -#define invalid_try_stmt_type 1210 -#define invalid_except_stmt_type 1211 -#define invalid_finally_stmt_type 1212 -#define invalid_except_stmt_indent_type 1213 -#define invalid_except_star_stmt_indent_type 1214 -#define invalid_match_stmt_type 1215 -#define invalid_case_block_type 1216 -#define invalid_as_pattern_type 1217 -#define invalid_class_pattern_type 1218 -#define invalid_class_argument_pattern_type 1219 -#define invalid_if_stmt_type 1220 -#define invalid_elif_stmt_type 1221 -#define invalid_else_stmt_type 1222 -#define invalid_while_stmt_type 1223 -#define invalid_for_stmt_type 1224 -#define invalid_def_raw_type 1225 -#define invalid_class_def_raw_type 1226 -#define invalid_double_starred_kvpairs_type 1227 -#define invalid_kvpair_type 1228 -#define _loop0_1_type 1229 -#define _loop0_2_type 1230 -#define _loop1_3_type 1231 -#define _loop0_5_type 1232 -#define _gather_4_type 1233 -#define _tmp_6_type 1234 -#define _tmp_7_type 1235 -#define _tmp_8_type 1236 -#define _tmp_9_type 1237 -#define _tmp_10_type 1238 -#define _tmp_11_type 1239 -#define _tmp_12_type 1240 -#define _tmp_13_type 1241 -#define _loop1_14_type 1242 -#define _tmp_15_type 1243 -#define _tmp_16_type 1244 -#define _tmp_17_type 1245 -#define _loop0_19_type 1246 -#define _gather_18_type 1247 -#define _loop0_21_type 1248 -#define _gather_20_type 1249 -#define _tmp_22_type 1250 -#define _tmp_23_type 1251 -#define _loop0_24_type 1252 -#define _loop1_25_type 1253 -#define _loop0_27_type 1254 -#define _gather_26_type 1255 -#define _tmp_28_type 1256 -#define _loop0_30_type 1257 -#define _gather_29_type 1258 -#define _tmp_31_type 1259 -#define _loop1_32_type 1260 -#define _tmp_33_type 1261 -#define _tmp_34_type 1262 -#define _tmp_35_type 1263 -#define _loop0_36_type 1264 -#define _loop0_37_type 1265 -#define _loop0_38_type 1266 -#define _loop1_39_type 1267 -#define _loop0_40_type 1268 -#define _loop1_41_type 1269 -#define _loop1_42_type 1270 -#define _loop1_43_type 1271 -#define _loop0_44_type 1272 -#define _loop1_45_type 1273 -#define _loop0_46_type 1274 -#define _loop1_47_type 1275 -#define _loop0_48_type 1276 -#define _loop0_49_type 1277 -#define _loop1_50_type 1278 -#define _loop0_52_type 1279 -#define _gather_51_type 1280 -#define _loop0_54_type 1281 -#define _gather_53_type 1282 -#define _loop0_56_type 1283 -#define _gather_55_type 1284 -#define _loop0_58_type 1285 -#define _gather_57_type 1286 -#define _tmp_59_type 1287 -#define _loop1_60_type 1288 -#define _loop1_61_type 1289 -#define _tmp_62_type 1290 -#define _tmp_63_type 1291 -#define _loop1_64_type 1292 -#define _loop0_66_type 1293 -#define _gather_65_type 1294 -#define _tmp_67_type 1295 -#define _tmp_68_type 1296 -#define _tmp_69_type 1297 -#define _tmp_70_type 1298 -#define _loop0_72_type 1299 -#define _gather_71_type 1300 -#define _loop0_74_type 1301 -#define _gather_73_type 1302 -#define _tmp_75_type 1303 -#define _loop0_77_type 1304 -#define _gather_76_type 1305 -#define _loop0_79_type 1306 -#define _gather_78_type 1307 -#define _loop1_80_type 1308 -#define _loop1_81_type 1309 -#define _loop0_83_type 1310 -#define _gather_82_type 1311 -#define 
_loop1_84_type 1312 -#define _loop1_85_type 1313 -#define _loop1_86_type 1314 -#define _tmp_87_type 1315 -#define _loop0_89_type 1316 -#define _gather_88_type 1317 -#define _tmp_90_type 1318 -#define _tmp_91_type 1319 -#define _tmp_92_type 1320 -#define _tmp_93_type 1321 -#define _tmp_94_type 1322 -#define _loop0_95_type 1323 -#define _loop0_96_type 1324 -#define _loop0_97_type 1325 -#define _loop1_98_type 1326 -#define _loop0_99_type 1327 -#define _loop1_100_type 1328 -#define _loop1_101_type 1329 -#define _loop1_102_type 1330 -#define _loop0_103_type 1331 -#define _loop1_104_type 1332 -#define _loop0_105_type 1333 -#define _loop1_106_type 1334 -#define _loop0_107_type 1335 -#define _loop1_108_type 1336 -#define _loop1_109_type 1337 -#define _tmp_110_type 1338 -#define _loop0_112_type 1339 -#define _gather_111_type 1340 -#define _loop1_113_type 1341 -#define _loop0_114_type 1342 -#define _loop0_115_type 1343 -#define _tmp_116_type 1344 -#define _loop0_118_type 1345 -#define _gather_117_type 1346 -#define _tmp_119_type 1347 -#define _loop0_121_type 1348 -#define _gather_120_type 1349 -#define _loop0_123_type 1350 -#define _gather_122_type 1351 -#define _loop0_125_type 1352 -#define _gather_124_type 1353 -#define _loop0_127_type 1354 -#define _gather_126_type 1355 -#define _loop0_128_type 1356 -#define _loop0_130_type 1357 -#define _gather_129_type 1358 -#define _loop1_131_type 1359 -#define _tmp_132_type 1360 -#define _loop0_134_type 1361 -#define _gather_133_type 1362 -#define _loop0_136_type 1363 -#define _gather_135_type 1364 -#define _loop0_138_type 1365 -#define _gather_137_type 1366 -#define _loop0_140_type 1367 -#define _gather_139_type 1368 -#define _loop0_142_type 1369 -#define _gather_141_type 1370 -#define _tmp_143_type 1371 -#define _tmp_144_type 1372 -#define _tmp_145_type 1373 -#define _tmp_146_type 1374 -#define _tmp_147_type 1375 -#define _tmp_148_type 1376 -#define _tmp_149_type 1377 -#define _tmp_150_type 1378 -#define _tmp_151_type 1379 -#define _loop0_152_type 1380 -#define _loop0_153_type 1381 -#define _loop0_154_type 1382 -#define _tmp_155_type 1383 -#define _tmp_156_type 1384 -#define _tmp_157_type 1385 -#define _tmp_158_type 1386 -#define _tmp_159_type 1387 -#define _loop0_160_type 1388 -#define _loop0_161_type 1389 -#define _loop0_162_type 1390 -#define _loop1_163_type 1391 -#define _tmp_164_type 1392 -#define _loop0_165_type 1393 -#define _tmp_166_type 1394 -#define _loop0_167_type 1395 -#define _loop1_168_type 1396 -#define _tmp_169_type 1397 -#define _tmp_170_type 1398 -#define _tmp_171_type 1399 -#define _loop0_172_type 1400 -#define _tmp_173_type 1401 -#define _tmp_174_type 1402 -#define _loop1_175_type 1403 -#define _tmp_176_type 1404 -#define _loop0_177_type 1405 -#define _loop0_178_type 1406 -#define _loop0_179_type 1407 -#define _loop0_181_type 1408 -#define _gather_180_type 1409 -#define _tmp_182_type 1410 -#define _loop0_183_type 1411 -#define _tmp_184_type 1412 -#define _loop0_185_type 1413 -#define _loop1_186_type 1414 -#define _loop1_187_type 1415 -#define _tmp_188_type 1416 -#define _tmp_189_type 1417 -#define _loop0_190_type 1418 -#define _tmp_191_type 1419 -#define _tmp_192_type 1420 -#define _tmp_193_type 1421 -#define _loop0_195_type 1422 -#define _gather_194_type 1423 -#define _loop0_197_type 1424 -#define _gather_196_type 1425 -#define _loop0_199_type 1426 -#define _gather_198_type 1427 -#define _loop0_201_type 1428 -#define _gather_200_type 1429 -#define _tmp_202_type 1430 -#define _loop0_203_type 1431 -#define _tmp_204_type 1432 -#define 
_loop0_205_type 1433 -#define _tmp_206_type 1434 -#define _tmp_207_type 1435 -#define _tmp_208_type 1436 -#define _tmp_209_type 1437 -#define _tmp_210_type 1438 -#define _tmp_211_type 1439 -#define _tmp_212_type 1440 -#define _tmp_213_type 1441 -#define _tmp_214_type 1442 -#define _loop0_216_type 1443 -#define _gather_215_type 1444 -#define _tmp_217_type 1445 -#define _tmp_218_type 1446 -#define _tmp_219_type 1447 -#define _tmp_220_type 1448 -#define _tmp_221_type 1449 -#define _tmp_222_type 1450 -#define _tmp_223_type 1451 -#define _tmp_224_type 1452 -#define _tmp_225_type 1453 -#define _tmp_226_type 1454 -#define _tmp_227_type 1455 -#define _tmp_228_type 1456 -#define _tmp_229_type 1457 -#define _tmp_230_type 1458 -#define _tmp_231_type 1459 -#define _tmp_232_type 1460 -#define _tmp_233_type 1461 -#define _tmp_234_type 1462 -#define _tmp_235_type 1463 -#define _tmp_236_type 1464 -#define _tmp_237_type 1465 -#define _tmp_238_type 1466 -#define _tmp_239_type 1467 -#define _tmp_240_type 1468 -#define _tmp_241_type 1469 -#define _tmp_242_type 1470 -#define _tmp_243_type 1471 -#define _tmp_244_type 1472 -#define _tmp_245_type 1473 -#define _tmp_246_type 1474 -#define _tmp_247_type 1475 -#define _loop1_248_type 1476 -#define _loop1_249_type 1477 +#define invalid_import_type 1207 +#define invalid_import_from_targets_type 1208 +#define invalid_with_stmt_type 1209 +#define invalid_with_stmt_indent_type 1210 +#define invalid_try_stmt_type 1211 +#define invalid_except_stmt_type 1212 +#define invalid_finally_stmt_type 1213 +#define invalid_except_stmt_indent_type 1214 +#define invalid_except_star_stmt_indent_type 1215 +#define invalid_match_stmt_type 1216 +#define invalid_case_block_type 1217 +#define invalid_as_pattern_type 1218 +#define invalid_class_pattern_type 1219 +#define invalid_class_argument_pattern_type 1220 +#define invalid_if_stmt_type 1221 +#define invalid_elif_stmt_type 1222 +#define invalid_else_stmt_type 1223 +#define invalid_while_stmt_type 1224 +#define invalid_for_stmt_type 1225 +#define invalid_def_raw_type 1226 +#define invalid_class_def_raw_type 1227 +#define invalid_double_starred_kvpairs_type 1228 +#define invalid_kvpair_type 1229 +#define invalid_starred_expression_type 1230 +#define _loop0_1_type 1231 +#define _loop0_2_type 1232 +#define _loop1_3_type 1233 +#define _loop0_5_type 1234 +#define _gather_4_type 1235 +#define _tmp_6_type 1236 +#define _tmp_7_type 1237 +#define _tmp_8_type 1238 +#define _tmp_9_type 1239 +#define _tmp_10_type 1240 +#define _tmp_11_type 1241 +#define _tmp_12_type 1242 +#define _tmp_13_type 1243 +#define _loop1_14_type 1244 +#define _tmp_15_type 1245 +#define _tmp_16_type 1246 +#define _tmp_17_type 1247 +#define _loop0_19_type 1248 +#define _gather_18_type 1249 +#define _loop0_21_type 1250 +#define _gather_20_type 1251 +#define _tmp_22_type 1252 +#define _tmp_23_type 1253 +#define _loop0_24_type 1254 +#define _loop1_25_type 1255 +#define _loop0_27_type 1256 +#define _gather_26_type 1257 +#define _tmp_28_type 1258 +#define _loop0_30_type 1259 +#define _gather_29_type 1260 +#define _tmp_31_type 1261 +#define _loop1_32_type 1262 +#define _tmp_33_type 1263 +#define _tmp_34_type 1264 +#define _tmp_35_type 1265 +#define _loop0_36_type 1266 +#define _loop0_37_type 1267 +#define _loop0_38_type 1268 +#define _loop1_39_type 1269 +#define _loop0_40_type 1270 +#define _loop1_41_type 1271 +#define _loop1_42_type 1272 +#define _loop1_43_type 1273 +#define _loop0_44_type 1274 +#define _loop1_45_type 1275 +#define _loop0_46_type 1276 +#define _loop1_47_type 1277 
+#define _loop0_48_type 1278 +#define _loop0_49_type 1279 +#define _loop1_50_type 1280 +#define _loop0_52_type 1281 +#define _gather_51_type 1282 +#define _loop0_54_type 1283 +#define _gather_53_type 1284 +#define _loop0_56_type 1285 +#define _gather_55_type 1286 +#define _loop0_58_type 1287 +#define _gather_57_type 1288 +#define _tmp_59_type 1289 +#define _loop1_60_type 1290 +#define _loop1_61_type 1291 +#define _tmp_62_type 1292 +#define _tmp_63_type 1293 +#define _loop1_64_type 1294 +#define _loop0_66_type 1295 +#define _gather_65_type 1296 +#define _tmp_67_type 1297 +#define _tmp_68_type 1298 +#define _tmp_69_type 1299 +#define _tmp_70_type 1300 +#define _loop0_72_type 1301 +#define _gather_71_type 1302 +#define _loop0_74_type 1303 +#define _gather_73_type 1304 +#define _tmp_75_type 1305 +#define _loop0_77_type 1306 +#define _gather_76_type 1307 +#define _loop0_79_type 1308 +#define _gather_78_type 1309 +#define _loop1_80_type 1310 +#define _loop1_81_type 1311 +#define _loop0_83_type 1312 +#define _gather_82_type 1313 +#define _loop1_84_type 1314 +#define _loop1_85_type 1315 +#define _loop1_86_type 1316 +#define _tmp_87_type 1317 +#define _loop0_89_type 1318 +#define _gather_88_type 1319 +#define _tmp_90_type 1320 +#define _tmp_91_type 1321 +#define _tmp_92_type 1322 +#define _tmp_93_type 1323 +#define _tmp_94_type 1324 +#define _loop0_95_type 1325 +#define _loop0_96_type 1326 +#define _loop0_97_type 1327 +#define _loop1_98_type 1328 +#define _loop0_99_type 1329 +#define _loop1_100_type 1330 +#define _loop1_101_type 1331 +#define _loop1_102_type 1332 +#define _loop0_103_type 1333 +#define _loop1_104_type 1334 +#define _loop0_105_type 1335 +#define _loop1_106_type 1336 +#define _loop0_107_type 1337 +#define _loop1_108_type 1338 +#define _loop1_109_type 1339 +#define _tmp_110_type 1340 +#define _loop0_112_type 1341 +#define _gather_111_type 1342 +#define _loop1_113_type 1343 +#define _loop0_114_type 1344 +#define _loop0_115_type 1345 +#define _tmp_116_type 1346 +#define _loop0_118_type 1347 +#define _gather_117_type 1348 +#define _tmp_119_type 1349 +#define _loop0_121_type 1350 +#define _gather_120_type 1351 +#define _loop0_123_type 1352 +#define _gather_122_type 1353 +#define _loop0_125_type 1354 +#define _gather_124_type 1355 +#define _loop0_127_type 1356 +#define _gather_126_type 1357 +#define _loop0_128_type 1358 +#define _loop0_130_type 1359 +#define _gather_129_type 1360 +#define _loop1_131_type 1361 +#define _tmp_132_type 1362 +#define _loop0_134_type 1363 +#define _gather_133_type 1364 +#define _loop0_136_type 1365 +#define _gather_135_type 1366 +#define _loop0_138_type 1367 +#define _gather_137_type 1368 +#define _loop0_140_type 1369 +#define _gather_139_type 1370 +#define _loop0_142_type 1371 +#define _gather_141_type 1372 +#define _tmp_143_type 1373 +#define _tmp_144_type 1374 +#define _tmp_145_type 1375 +#define _tmp_146_type 1376 +#define _tmp_147_type 1377 +#define _tmp_148_type 1378 +#define _tmp_149_type 1379 +#define _tmp_150_type 1380 +#define _tmp_151_type 1381 +#define _tmp_152_type 1382 +#define _tmp_153_type 1383 +#define _loop0_154_type 1384 +#define _loop0_155_type 1385 +#define _loop0_156_type 1386 +#define _tmp_157_type 1387 +#define _tmp_158_type 1388 +#define _tmp_159_type 1389 +#define _tmp_160_type 1390 +#define _tmp_161_type 1391 +#define _loop0_162_type 1392 +#define _loop0_163_type 1393 +#define _loop0_164_type 1394 +#define _loop1_165_type 1395 +#define _tmp_166_type 1396 +#define _loop0_167_type 1397 +#define _tmp_168_type 1398 +#define _loop0_169_type 
1399 +#define _loop1_170_type 1400 +#define _tmp_171_type 1401 +#define _tmp_172_type 1402 +#define _tmp_173_type 1403 +#define _loop0_174_type 1404 +#define _tmp_175_type 1405 +#define _tmp_176_type 1406 +#define _loop1_177_type 1407 +#define _tmp_178_type 1408 +#define _loop0_179_type 1409 +#define _loop0_180_type 1410 +#define _loop0_181_type 1411 +#define _loop0_183_type 1412 +#define _gather_182_type 1413 +#define _tmp_184_type 1414 +#define _loop0_185_type 1415 +#define _tmp_186_type 1416 +#define _loop0_187_type 1417 +#define _loop1_188_type 1418 +#define _loop1_189_type 1419 +#define _tmp_190_type 1420 +#define _tmp_191_type 1421 +#define _loop0_192_type 1422 +#define _tmp_193_type 1423 +#define _tmp_194_type 1424 +#define _tmp_195_type 1425 +#define _loop0_197_type 1426 +#define _gather_196_type 1427 +#define _loop0_199_type 1428 +#define _gather_198_type 1429 +#define _loop0_201_type 1430 +#define _gather_200_type 1431 +#define _loop0_203_type 1432 +#define _gather_202_type 1433 +#define _tmp_204_type 1434 +#define _loop0_205_type 1435 +#define _loop1_206_type 1436 +#define _tmp_207_type 1437 +#define _loop0_208_type 1438 +#define _loop1_209_type 1439 +#define _tmp_210_type 1440 +#define _tmp_211_type 1441 +#define _tmp_212_type 1442 +#define _tmp_213_type 1443 +#define _tmp_214_type 1444 +#define _tmp_215_type 1445 +#define _tmp_216_type 1446 +#define _tmp_217_type 1447 +#define _tmp_218_type 1448 +#define _tmp_219_type 1449 +#define _loop0_221_type 1450 +#define _gather_220_type 1451 +#define _tmp_222_type 1452 +#define _tmp_223_type 1453 +#define _tmp_224_type 1454 +#define _tmp_225_type 1455 +#define _tmp_226_type 1456 +#define _tmp_227_type 1457 +#define _tmp_228_type 1458 +#define _tmp_229_type 1459 +#define _tmp_230_type 1460 +#define _tmp_231_type 1461 +#define _tmp_232_type 1462 +#define _tmp_233_type 1463 +#define _tmp_234_type 1464 +#define _tmp_235_type 1465 +#define _tmp_236_type 1466 +#define _tmp_237_type 1467 +#define _tmp_238_type 1468 +#define _tmp_239_type 1469 +#define _tmp_240_type 1470 +#define _tmp_241_type 1471 +#define _tmp_242_type 1472 +#define _tmp_243_type 1473 +#define _tmp_244_type 1474 +#define _tmp_245_type 1475 +#define _tmp_246_type 1476 +#define _tmp_247_type 1477 +#define _tmp_248_type 1478 +#define _tmp_249_type 1479 +#define _tmp_250_type 1480 +#define _tmp_251_type 1481 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -763,6 +767,7 @@ static void *invalid_double_type_comments_rule(Parser *p); static void *invalid_with_item_rule(Parser *p); static void *invalid_for_target_rule(Parser *p); static void *invalid_group_rule(Parser *p); +static void *invalid_import_rule(Parser *p); static void *invalid_import_from_targets_rule(Parser *p); static void *invalid_with_stmt_rule(Parser *p); static void *invalid_with_stmt_indent_rule(Parser *p); @@ -785,6 +790,7 @@ static void *invalid_def_raw_rule(Parser *p); static void *invalid_class_def_raw_rule(Parser *p); static void *invalid_double_starred_kvpairs_rule(Parser *p); static void *invalid_kvpair_rule(Parser *p); +static void *invalid_starred_expression_rule(Parser *p); static asdl_seq *_loop0_1_rule(Parser *p); static asdl_seq *_loop0_2_rule(Parser *p); static asdl_seq *_loop1_3_rule(Parser *p); @@ -936,76 +942,76 @@ static void *_tmp_148_rule(Parser *p); static void *_tmp_149_rule(Parser *p); static void *_tmp_150_rule(Parser *p); static void *_tmp_151_rule(Parser *p); -static asdl_seq *_loop0_152_rule(Parser *p); -static asdl_seq *_loop0_153_rule(Parser *p); 
+static void *_tmp_152_rule(Parser *p); +static void *_tmp_153_rule(Parser *p); static asdl_seq *_loop0_154_rule(Parser *p); -static void *_tmp_155_rule(Parser *p); -static void *_tmp_156_rule(Parser *p); +static asdl_seq *_loop0_155_rule(Parser *p); +static asdl_seq *_loop0_156_rule(Parser *p); static void *_tmp_157_rule(Parser *p); static void *_tmp_158_rule(Parser *p); static void *_tmp_159_rule(Parser *p); -static asdl_seq *_loop0_160_rule(Parser *p); -static asdl_seq *_loop0_161_rule(Parser *p); +static void *_tmp_160_rule(Parser *p); +static void *_tmp_161_rule(Parser *p); static asdl_seq *_loop0_162_rule(Parser *p); -static asdl_seq *_loop1_163_rule(Parser *p); -static void *_tmp_164_rule(Parser *p); -static asdl_seq *_loop0_165_rule(Parser *p); +static asdl_seq *_loop0_163_rule(Parser *p); +static asdl_seq *_loop0_164_rule(Parser *p); +static asdl_seq *_loop1_165_rule(Parser *p); static void *_tmp_166_rule(Parser *p); static asdl_seq *_loop0_167_rule(Parser *p); -static asdl_seq *_loop1_168_rule(Parser *p); -static void *_tmp_169_rule(Parser *p); -static void *_tmp_170_rule(Parser *p); +static void *_tmp_168_rule(Parser *p); +static asdl_seq *_loop0_169_rule(Parser *p); +static asdl_seq *_loop1_170_rule(Parser *p); static void *_tmp_171_rule(Parser *p); -static asdl_seq *_loop0_172_rule(Parser *p); +static void *_tmp_172_rule(Parser *p); static void *_tmp_173_rule(Parser *p); -static void *_tmp_174_rule(Parser *p); -static asdl_seq *_loop1_175_rule(Parser *p); +static asdl_seq *_loop0_174_rule(Parser *p); +static void *_tmp_175_rule(Parser *p); static void *_tmp_176_rule(Parser *p); -static asdl_seq *_loop0_177_rule(Parser *p); -static asdl_seq *_loop0_178_rule(Parser *p); +static asdl_seq *_loop1_177_rule(Parser *p); +static void *_tmp_178_rule(Parser *p); static asdl_seq *_loop0_179_rule(Parser *p); +static asdl_seq *_loop0_180_rule(Parser *p); static asdl_seq *_loop0_181_rule(Parser *p); -static asdl_seq *_gather_180_rule(Parser *p); -static void *_tmp_182_rule(Parser *p); static asdl_seq *_loop0_183_rule(Parser *p); +static asdl_seq *_gather_182_rule(Parser *p); static void *_tmp_184_rule(Parser *p); static asdl_seq *_loop0_185_rule(Parser *p); -static asdl_seq *_loop1_186_rule(Parser *p); -static asdl_seq *_loop1_187_rule(Parser *p); -static void *_tmp_188_rule(Parser *p); -static void *_tmp_189_rule(Parser *p); -static asdl_seq *_loop0_190_rule(Parser *p); +static void *_tmp_186_rule(Parser *p); +static asdl_seq *_loop0_187_rule(Parser *p); +static asdl_seq *_loop1_188_rule(Parser *p); +static asdl_seq *_loop1_189_rule(Parser *p); +static void *_tmp_190_rule(Parser *p); static void *_tmp_191_rule(Parser *p); -static void *_tmp_192_rule(Parser *p); +static asdl_seq *_loop0_192_rule(Parser *p); static void *_tmp_193_rule(Parser *p); -static asdl_seq *_loop0_195_rule(Parser *p); -static asdl_seq *_gather_194_rule(Parser *p); +static void *_tmp_194_rule(Parser *p); +static void *_tmp_195_rule(Parser *p); static asdl_seq *_loop0_197_rule(Parser *p); static asdl_seq *_gather_196_rule(Parser *p); static asdl_seq *_loop0_199_rule(Parser *p); static asdl_seq *_gather_198_rule(Parser *p); static asdl_seq *_loop0_201_rule(Parser *p); static asdl_seq *_gather_200_rule(Parser *p); -static void *_tmp_202_rule(Parser *p); static asdl_seq *_loop0_203_rule(Parser *p); +static asdl_seq *_gather_202_rule(Parser *p); static void *_tmp_204_rule(Parser *p); static asdl_seq *_loop0_205_rule(Parser *p); -static void *_tmp_206_rule(Parser *p); +static asdl_seq *_loop1_206_rule(Parser *p); static void 
*_tmp_207_rule(Parser *p); -static void *_tmp_208_rule(Parser *p); -static void *_tmp_209_rule(Parser *p); +static asdl_seq *_loop0_208_rule(Parser *p); +static asdl_seq *_loop1_209_rule(Parser *p); static void *_tmp_210_rule(Parser *p); static void *_tmp_211_rule(Parser *p); static void *_tmp_212_rule(Parser *p); static void *_tmp_213_rule(Parser *p); static void *_tmp_214_rule(Parser *p); -static asdl_seq *_loop0_216_rule(Parser *p); -static asdl_seq *_gather_215_rule(Parser *p); +static void *_tmp_215_rule(Parser *p); +static void *_tmp_216_rule(Parser *p); static void *_tmp_217_rule(Parser *p); static void *_tmp_218_rule(Parser *p); static void *_tmp_219_rule(Parser *p); -static void *_tmp_220_rule(Parser *p); -static void *_tmp_221_rule(Parser *p); +static asdl_seq *_loop0_221_rule(Parser *p); +static asdl_seq *_gather_220_rule(Parser *p); static void *_tmp_222_rule(Parser *p); static void *_tmp_223_rule(Parser *p); static void *_tmp_224_rule(Parser *p); @@ -1032,8 +1038,10 @@ static void *_tmp_244_rule(Parser *p); static void *_tmp_245_rule(Parser *p); static void *_tmp_246_rule(Parser *p); static void *_tmp_247_rule(Parser *p); -static asdl_seq *_loop1_248_rule(Parser *p); -static asdl_seq *_loop1_249_rule(Parser *p); +static void *_tmp_248_rule(Parser *p); +static void *_tmp_249_rule(Parser *p); +static void *_tmp_250_rule(Parser *p); +static void *_tmp_251_rule(Parser *p); // file: statements? $ @@ -2019,7 +2027,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'if' if_stmt")); stmt_ty if_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 634) // token='if' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 641) // token='if' && (if_stmt_var = if_stmt_rule(p)) // if_stmt ) @@ -2103,7 +2111,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'try' try_stmt")); stmt_ty try_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 618) // token='try' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 623) // token='try' && (try_stmt_var = try_stmt_rule(p)) // try_stmt ) @@ -2124,7 +2132,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'while' while_stmt")); stmt_ty while_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 639) // token='while' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 646) // token='while' && (while_stmt_var = while_stmt_rule(p)) // while_stmt ) @@ -3301,7 +3309,7 @@ assert_stmt_rule(Parser *p) return _res; } -// import_stmt: import_name | import_from +// import_stmt: invalid_import | import_name | import_from static stmt_ty import_stmt_rule(Parser *p) { @@ -3315,6 +3323,25 @@ import_stmt_rule(Parser *p) } stmt_ty _res = NULL; int _mark = p->mark; + if (p->call_invalid_rules) { // invalid_import + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> import_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_import")); + void *invalid_import_var; + if ( + (invalid_import_var = invalid_import_rule(p)) // invalid_import + ) + { + D(fprintf(stderr, "%*c+ import_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_import")); + _res = invalid_import_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s import_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_import")); + } { // import_name if (p->error_indicator) { p->level--; @@ -3391,7 +3418,7 @@ import_name_rule(Parser *p) Token * _keyword; asdl_alias_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 531)) // token='import' + (_keyword = _PyPegen_expect_token(p, 606)) // token='import' && (a = dotted_as_names_rule(p)) // dotted_as_names ) @@ -3461,13 +3488,13 @@ import_from_rule(Parser *p) expr_ty b; asdl_alias_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 572)) // token='from' + (_keyword = _PyPegen_expect_token(p, 607)) // token='from' && (a = _loop0_24_rule(p)) // (('.' | '...'))* && (b = dotted_name_rule(p)) // dotted_name && - (_keyword_1 = _PyPegen_expect_token(p, 531)) // token='import' + (_keyword_1 = _PyPegen_expect_token(p, 606)) // token='import' && (c = import_from_targets_rule(p)) // import_from_targets ) @@ -3505,11 +3532,11 @@ import_from_rule(Parser *p) asdl_seq * a; asdl_alias_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 572)) // token='from' + (_keyword = _PyPegen_expect_token(p, 607)) // token='from' && (a = _loop1_25_rule(p)) // (('.' | '...'))+ && - (_keyword_1 = _PyPegen_expect_token(p, 531)) // token='import' + (_keyword_1 = _PyPegen_expect_token(p, 606)) // token='import' && (b = import_from_targets_rule(p)) // import_from_targets ) @@ -4266,7 +4293,7 @@ class_def_raw_rule(Parser *p) void *b; asdl_stmt_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 646)) // token='class' + (_keyword = _PyPegen_expect_token(p, 653)) // token='class' && (a = _PyPegen_name_token(p)) // NAME && @@ -4432,7 +4459,7 @@ function_def_raw_rule(Parser *p) void *params; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 644)) // token='def' + (_keyword = _PyPegen_expect_token(p, 651)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && @@ -4492,7 +4519,7 @@ function_def_raw_rule(Parser *p) if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (_keyword = _PyPegen_expect_token(p, 644)) // token='def' + (_keyword = _PyPegen_expect_token(p, 651)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && @@ -5844,7 +5871,7 @@ if_stmt_rule(Parser *p) asdl_stmt_seq* b; stmt_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 634)) // token='if' + (_keyword = _PyPegen_expect_token(p, 641)) // token='if' && (a = named_expression_rule(p)) // named_expression && @@ -5889,7 +5916,7 @@ if_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 634)) // token='if' + (_keyword = _PyPegen_expect_token(p, 641)) // token='if' && (a = named_expression_rule(p)) // named_expression && @@ -5985,7 +6012,7 @@ elif_stmt_rule(Parser *p) asdl_stmt_seq* b; stmt_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 636)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 643)) // token='elif' && (a = named_expression_rule(p)) // named_expression && @@ -6030,7 +6057,7 @@ elif_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 636)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 643)) // token='elif' && (a = named_expression_rule(p)) // named_expression && @@ -6112,7 +6139,7 @@ else_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 637)) // token='else' + (_keyword = _PyPegen_expect_token(p, 644)) // token='else' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -6192,7 +6219,7 @@ while_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 
639)) // token='while' + (_keyword = _PyPegen_expect_token(p, 646)) // token='while' && (a = named_expression_rule(p)) // named_expression && @@ -6293,11 +6320,11 @@ for_stmt_rule(Parser *p) expr_ty t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 642)) // token='for' + (_keyword = _PyPegen_expect_token(p, 649)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 643)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in' && (_cut_var = 1) && @@ -6357,11 +6384,11 @@ for_stmt_rule(Parser *p) if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (_keyword = _PyPegen_expect_token(p, 642)) // token='for' + (_keyword = _PyPegen_expect_token(p, 649)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 643)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in' && (_cut_var = 1) && @@ -6490,7 +6517,7 @@ with_stmt_rule(Parser *p) asdl_withitem_seq* a; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 612)) // token='with' + (_keyword = _PyPegen_expect_token(p, 614)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -6539,7 +6566,7 @@ with_stmt_rule(Parser *p) asdl_stmt_seq* b; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 612)) // token='with' + (_keyword = _PyPegen_expect_token(p, 614)) // token='with' && (a = (asdl_withitem_seq*)_gather_53_rule(p)) // ','.with_item+ && @@ -6590,7 +6617,7 @@ with_stmt_rule(Parser *p) if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (_keyword = _PyPegen_expect_token(p, 612)) // token='with' + (_keyword = _PyPegen_expect_token(p, 614)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -6642,7 +6669,7 @@ with_stmt_rule(Parser *p) if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (_keyword = _PyPegen_expect_token(p, 612)) // token='with' + (_keyword = _PyPegen_expect_token(p, 614)) // token='with' && (a = (asdl_withitem_seq*)_gather_57_rule(p)) // ','.with_item+ && @@ -6729,7 +6756,7 @@ with_item_rule(Parser *p) if ( (e = expression_rule(p)) // expression && - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (t = star_target_rule(p)) // star_target && @@ -6855,7 +6882,7 @@ try_stmt_rule(Parser *p) asdl_stmt_seq* b; asdl_stmt_seq* f; if ( - (_keyword = _PyPegen_expect_token(p, 618)) // token='try' + (_keyword = _PyPegen_expect_token(p, 623)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -6899,7 +6926,7 @@ try_stmt_rule(Parser *p) asdl_excepthandler_seq* ex; void *f; if ( - (_keyword = _PyPegen_expect_token(p, 618)) // token='try' + (_keyword = _PyPegen_expect_token(p, 623)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -6947,7 +6974,7 @@ try_stmt_rule(Parser *p) asdl_excepthandler_seq* ex; void *f; if ( - (_keyword = _PyPegen_expect_token(p, 618)) // token='try' + (_keyword = _PyPegen_expect_token(p, 623)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7046,7 +7073,7 @@ except_block_rule(Parser *p) expr_ty e; void *t; if ( - (_keyword = _PyPegen_expect_token(p, 629)) // token='except' + (_keyword = _PyPegen_expect_token(p, 636)) // token='except' && (e = expression_rule(p)) // expression && @@ -7089,7 +7116,7 @@ except_block_rule(Parser *p) Token * 
_literal; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 629)) // token='except' + (_keyword = _PyPegen_expect_token(p, 636)) // token='except' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -7201,7 +7228,7 @@ except_star_block_rule(Parser *p) expr_ty e; void *t; if ( - (_keyword = _PyPegen_expect_token(p, 629)) // token='except' + (_keyword = _PyPegen_expect_token(p, 636)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -7304,7 +7331,7 @@ finally_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 625)) // token='finally' + (_keyword = _PyPegen_expect_token(p, 632)) // token='finally' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7616,7 +7643,7 @@ guard_rule(Parser *p) Token * _keyword; expr_ty guard; if ( - (_keyword = _PyPegen_expect_token(p, 634)) // token='if' + (_keyword = _PyPegen_expect_token(p, 641)) // token='if' && (guard = named_expression_rule(p)) // named_expression ) @@ -7814,7 +7841,7 @@ as_pattern_rule(Parser *p) if ( (pattern = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (target = pattern_capture_target_rule(p)) // pattern_capture_target ) @@ -10642,11 +10669,11 @@ expression_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 634)) // token='if' + (_keyword = _PyPegen_expect_token(p, 641)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 637)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 644)) // token='else' && (c = expression_rule(p)) // expression ) @@ -10753,7 +10780,7 @@ yield_expr_rule(Parser *p) if ( (_keyword = _PyPegen_expect_token(p, 573)) // token='yield' && - (_keyword_1 = _PyPegen_expect_token(p, 572)) // token='from' + (_keyword_1 = _PyPegen_expect_token(p, 607)) // token='from' && (a = expression_rule(p)) // expression ) @@ -12203,7 +12230,7 @@ notin_bitwise_or_rule(Parser *p) if ( (_keyword = _PyPegen_expect_token(p, 581)) // token='not' && - (_keyword_1 = _PyPegen_expect_token(p, 643)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -12250,7 +12277,7 @@ in_bitwise_or_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 643)) // token='in' + (_keyword = _PyPegen_expect_token(p, 650)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -16035,11 +16062,11 @@ for_if_clause_rule(Parser *p) if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (_keyword = _PyPegen_expect_token(p, 642)) // token='for' + (_keyword = _PyPegen_expect_token(p, 649)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 643)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in' && (_cut_var = 1) && @@ -16078,11 +16105,11 @@ for_if_clause_rule(Parser *p) expr_ty b; asdl_expr_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 642)) // token='for' + (_keyword = _PyPegen_expect_token(p, 649)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 643)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in' && (_cut_var = 1) && @@ -16760,7 +16787,7 @@ kwargs_rule(Parser *p) return _res; } -// starred_expression: '*' expression +// 
starred_expression: invalid_starred_expression | '*' expression static expr_ty starred_expression_rule(Parser *p) { @@ -16783,6 +16810,25 @@ starred_expression_rule(Parser *p) UNUSED(_start_lineno); // Only used by EXTRA macro int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro + if (p->call_invalid_rules) { // invalid_starred_expression + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> starred_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_starred_expression")); + void *invalid_starred_expression_var; + if ( + (invalid_starred_expression_var = invalid_starred_expression_rule(p)) // invalid_starred_expression + ) + { + D(fprintf(stderr, "%*c+ starred_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_starred_expression")); + _res = invalid_starred_expression_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s starred_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_starred_expression")); + } { // '*' expression if (p->error_indicator) { p->level--; @@ -18918,6 +18964,7 @@ func_type_comment_rule(Parser *p) // | args ',' '*' // | expression for_if_clauses ',' [args | expression for_if_clauses] // | NAME '=' expression for_if_clauses +// | [(args ',')] NAME '=' &(',' | ')') // | args for_if_clauses // | args ',' expression for_if_clauses // | args ',' args @@ -19031,6 +19078,39 @@ invalid_arguments_rule(Parser *p) D(fprintf(stderr, "%*c%s invalid_arguments[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME '=' expression for_if_clauses")); } + { // [(args ',')] NAME '=' &(',' | ')') + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "[(args ',')] NAME '=' &(',' | ')')")); + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + expr_ty a; + Token * b; + if ( + (_opt_var = _tmp_145_rule(p), !p->error_indicator) // [(args ',')] + && + (a = _PyPegen_name_token(p)) // NAME + && + (b = _PyPegen_expect_token(p, 22)) // token='=' + && + _PyPegen_lookahead(1, _tmp_146_rule, p) + ) + { + D(fprintf(stderr, "%*c+ invalid_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "[(args ',')] NAME '=' &(',' | ')')")); + _res = RAISE_SYNTAX_ERROR_KNOWN_RANGE ( a , b , "expected argument value expression" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_arguments[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "[(args ',')] NAME '=' &(',' | ')')")); + } { // args for_if_clauses if (p->error_indicator) { p->level--; @@ -19131,6 +19211,7 @@ invalid_arguments_rule(Parser *p) // | ('True' | 'False' | 'None') '=' // | NAME '=' expression for_if_clauses // | !(NAME '=') expression '=' +// | '**' expression '=' expression static void * invalid_kwarg_rule(Parser *p) { @@ -19153,7 +19234,7 @@ invalid_kwarg_rule(Parser *p) Token* a; Token * b; if ( - (a = (Token*)_tmp_145_rule(p)) // 'True' | 'False' | 'None' + (a = (Token*)_tmp_147_rule(p)) // 'True' | 'False' | 'None' && (b = _PyPegen_expect_token(p, 22)) // token='=' ) @@ -19213,7 +19294,7 @@ invalid_kwarg_rule(Parser *p) expr_ty a; Token * b; if ( - _PyPegen_lookahead(0, _tmp_146_rule, p) + _PyPegen_lookahead(0, _tmp_148_rule, p) && (a = expression_rule(p)) // expression && @@ -19233,6 +19314,39 @@ invalid_kwarg_rule(Parser *p) D(fprintf(stderr, "%*c%s invalid_kwarg[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "!(NAME '=') expression '='")); } + { // '**' expression '=' expression + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_kwarg[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**' expression '=' expression")); + Token * _literal; + Token * a; + expr_ty b; + expr_ty expression_var; + if ( + (a = _PyPegen_expect_token(p, 35)) // token='**' + && + (expression_var = expression_rule(p)) // expression + && + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + && + (b = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ invalid_kwarg[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' expression '=' expression")); + _res = RAISE_SYNTAX_ERROR_KNOWN_RANGE ( a , b , "cannot assign to keyword argument unpacking" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_kwarg[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'**' expression '=' expression")); + } _res = NULL; done: p->level--; @@ -19284,11 +19398,11 @@ expression_without_invalid_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 634)) // token='if' + (_keyword = _PyPegen_expect_token(p, 641)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 637)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 644)) // token='else' && (c = expression_rule(p)) // expression ) @@ -19438,7 +19552,7 @@ invalid_expression_rule(Parser *p) expr_ty a; expr_ty b; if ( - _PyPegen_lookahead(0, _tmp_147_rule, p) + _PyPegen_lookahead(0, _tmp_149_rule, p) && (a = disjunction_rule(p)) // disjunction && @@ -19470,11 +19584,11 @@ invalid_expression_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 634)) // token='if' + (_keyword = _PyPegen_expect_token(p, 641)) // token='if' && (b = disjunction_rule(p)) // disjunction && - _PyPegen_lookahead(0, _tmp_148_rule, p) + _PyPegen_lookahead(0, _tmp_150_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "disjunction 'if' disjunction !('else' | ':')")); @@ -19563,7 +19677,7 @@ invalid_named_expression_rule(Parser *p) && (b = bitwise_or_rule(p)) // bitwise_or && - _PyPegen_lookahead(0, _tmp_149_rule, p) + _PyPegen_lookahead(0, _tmp_151_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '=' bitwise_or !('=' | ':=')")); @@ -19589,7 +19703,7 @@ invalid_named_expression_rule(Parser *p) Token * b; expr_ty bitwise_or_var; if ( - _PyPegen_lookahead(0, _tmp_150_rule, p) + _PyPegen_lookahead(0, _tmp_152_rule, p) && (a = bitwise_or_rule(p)) // bitwise_or && @@ -19597,7 +19711,7 @@ invalid_named_expression_rule(Parser *p) && (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or && - _PyPegen_lookahead(0, _tmp_151_rule, p) + _PyPegen_lookahead(0, _tmp_153_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!(list | tuple | genexp | 'True' | 'None' | 'False') bitwise_or '=' bitwise_or !('=' | ':=')")); @@ -19678,7 +19792,7 @@ invalid_assignment_rule(Parser *p) D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions* ':' expression")); Token * _literal; Token * _literal_1; - asdl_seq * _loop0_152_var; + asdl_seq * _loop0_154_var; expr_ty a; expr_ty expression_var; if ( @@ -19686,7 +19800,7 @@ invalid_assignment_rule(Parser *p) && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_loop0_152_var = _loop0_152_rule(p)) // star_named_expressions* + (_loop0_154_var = _loop0_154_rule(p)) // star_named_expressions* && (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' && @@ -19743,10 +19857,10 @@ invalid_assignment_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* star_expressions '='")); Token * _literal; - asdl_seq * _loop0_153_var; + asdl_seq * _loop0_155_var; expr_ty a; if ( - (_loop0_153_var = _loop0_153_rule(p)) // ((star_targets '='))* + (_loop0_155_var = _loop0_155_rule(p)) // ((star_targets '='))* && (a = star_expressions_rule(p)) // star_expressions && @@ -19773,10 +19887,10 @@ invalid_assignment_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', 
_mark, p->mark, "((star_targets '='))* yield_expr '='")); Token * _literal; - asdl_seq * _loop0_154_var; + asdl_seq * _loop0_156_var; expr_ty a; if ( - (_loop0_154_var = _loop0_154_rule(p)) // ((star_targets '='))* + (_loop0_156_var = _loop0_156_rule(p)) // ((star_targets '='))* && (a = yield_expr_rule(p)) // yield_expr && @@ -19802,7 +19916,7 @@ invalid_assignment_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); - void *_tmp_155_var; + void *_tmp_157_var; expr_ty a; AugOperator* augassign_var; if ( @@ -19810,7 +19924,7 @@ invalid_assignment_rule(Parser *p) && (augassign_var = augassign_rule(p)) // augassign && - (_tmp_155_var = _tmp_155_rule(p)) // yield_expr | star_expressions + (_tmp_157_var = _tmp_157_rule(p)) // yield_expr | star_expressions ) { D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); @@ -20036,11 +20150,11 @@ invalid_comprehension_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '(' | '{') starred_expression for_if_clauses")); - void *_tmp_156_var; + void *_tmp_158_var; expr_ty a; asdl_comprehension_seq* for_if_clauses_var; if ( - (_tmp_156_var = _tmp_156_rule(p)) // '[' | '(' | '{' + (_tmp_158_var = _tmp_158_rule(p)) // '[' | '(' | '{' && (a = starred_expression_rule(p)) // starred_expression && @@ -20067,12 +20181,12 @@ invalid_comprehension_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '{') star_named_expression ',' star_named_expressions for_if_clauses")); Token * _literal; - void *_tmp_157_var; + void *_tmp_159_var; expr_ty a; asdl_expr_seq* b; asdl_comprehension_seq* for_if_clauses_var; if ( - (_tmp_157_var = _tmp_157_rule(p)) // '[' | '{' + (_tmp_159_var = _tmp_159_rule(p)) // '[' | '{' && (a = star_named_expression_rule(p)) // star_named_expression && @@ -20102,12 +20216,12 @@ invalid_comprehension_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '{') star_named_expression ',' for_if_clauses")); - void *_tmp_158_var; + void *_tmp_160_var; expr_ty a; Token * b; asdl_comprehension_seq* for_if_clauses_var; if ( - (_tmp_158_var = _tmp_158_rule(p)) // '[' | '{' + (_tmp_160_var = _tmp_160_rule(p)) // '[' | '{' && (a = star_named_expression_rule(p)) // star_named_expression && @@ -20244,13 +20358,13 @@ invalid_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(slash_no_default | slash_with_default) param_maybe_default* '/'")); - asdl_seq * _loop0_160_var; - void *_tmp_159_var; + asdl_seq * _loop0_162_var; + void *_tmp_161_var; Token * a; if ( - (_tmp_159_var = _tmp_159_rule(p)) // slash_no_default | slash_with_default + (_tmp_161_var = _tmp_161_rule(p)) // slash_no_default | slash_with_default && - (_loop0_160_var = _loop0_160_rule(p)) // param_maybe_default* + (_loop0_162_var = _loop0_162_rule(p)) // param_maybe_default* && (a = _PyPegen_expect_token(p, 17)) // token='/' ) @@ -20274,7 +20388,7 @@ invalid_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default? 
param_no_default* invalid_parameters_helper param_no_default")); - asdl_seq * _loop0_161_var; + asdl_seq * _loop0_163_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings arg_ty a; @@ -20282,7 +20396,7 @@ invalid_parameters_rule(Parser *p) if ( (_opt_var = slash_no_default_rule(p), !p->error_indicator) // slash_no_default? && - (_loop0_161_var = _loop0_161_rule(p)) // param_no_default* + (_loop0_163_var = _loop0_163_rule(p)) // param_no_default* && (invalid_parameters_helper_var = invalid_parameters_helper_rule(p)) // invalid_parameters_helper && @@ -20308,18 +20422,18 @@ invalid_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default* '(' param_no_default+ ','? ')'")); - asdl_seq * _loop0_162_var; - asdl_seq * _loop1_163_var; + asdl_seq * _loop0_164_var; + asdl_seq * _loop1_165_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings Token * a; Token * b; if ( - (_loop0_162_var = _loop0_162_rule(p)) // param_no_default* + (_loop0_164_var = _loop0_164_rule(p)) // param_no_default* && (a = _PyPegen_expect_token(p, 7)) // token='(' && - (_loop1_163_var = _loop1_163_rule(p)) // param_no_default+ + (_loop1_165_var = _loop1_165_rule(p)) // param_no_default+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? && @@ -20346,22 +20460,22 @@ invalid_parameters_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "[(slash_no_default | slash_with_default)] param_maybe_default* '*' (',' | param_no_default) param_maybe_default* '/'")); Token * _literal; - asdl_seq * _loop0_165_var; asdl_seq * _loop0_167_var; + asdl_seq * _loop0_169_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings - void *_tmp_166_var; + void *_tmp_168_var; Token * a; if ( - (_opt_var = _tmp_164_rule(p), !p->error_indicator) // [(slash_no_default | slash_with_default)] + (_opt_var = _tmp_166_rule(p), !p->error_indicator) // [(slash_no_default | slash_with_default)] && - (_loop0_165_var = _loop0_165_rule(p)) // param_maybe_default* + (_loop0_167_var = _loop0_167_rule(p)) // param_maybe_default* && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_166_var = _tmp_166_rule(p)) // ',' | param_no_default + (_tmp_168_var = _tmp_168_rule(p)) // ',' | param_no_default && - (_loop0_167_var = _loop0_167_rule(p)) // param_maybe_default* + (_loop0_169_var = _loop0_169_rule(p)) // param_maybe_default* && (a = _PyPegen_expect_token(p, 17)) // token='/' ) @@ -20386,10 +20500,10 @@ invalid_parameters_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default+ '/' '*'")); Token * _literal; - asdl_seq * _loop1_168_var; + asdl_seq * _loop1_170_var; Token * a; if ( - (_loop1_168_var = _loop1_168_rule(p)) // param_maybe_default+ + (_loop1_170_var = _loop1_170_rule(p)) // param_maybe_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -20439,7 +20553,7 @@ invalid_default_rule(Parser *p) if ( (a = _PyPegen_expect_token(p, 22)) // token='=' && - _PyPegen_lookahead(1, _tmp_169_rule, p) + _PyPegen_lookahead(1, _tmp_171_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' &(')' | ',')")); @@ -20485,12 +20599,12 @@ invalid_star_etc_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); - void 
*_tmp_170_var; + void *_tmp_172_var; Token * a; if ( (a = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_170_var = _tmp_170_rule(p)) // ')' | ',' (')' | '**') + (_tmp_172_var = _tmp_172_rule(p)) // ')' | ',' (')' | '**') ) { D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); @@ -20573,20 +20687,20 @@ invalid_star_etc_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (param_no_default | ',') param_maybe_default* '*' (param_no_default | ',')")); Token * _literal; - asdl_seq * _loop0_172_var; - void *_tmp_171_var; + asdl_seq * _loop0_174_var; void *_tmp_173_var; + void *_tmp_175_var; Token * a; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_171_var = _tmp_171_rule(p)) // param_no_default | ',' + (_tmp_173_var = _tmp_173_rule(p)) // param_no_default | ',' && - (_loop0_172_var = _loop0_172_rule(p)) // param_maybe_default* + (_loop0_174_var = _loop0_174_rule(p)) // param_maybe_default* && (a = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_173_var = _tmp_173_rule(p)) // param_no_default | ',' + (_tmp_175_var = _tmp_175_rule(p)) // param_no_default | ',' ) { D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (param_no_default | ',') param_maybe_default* '*' (param_no_default | ',')")); @@ -20702,7 +20816,7 @@ invalid_kwds_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (a = (Token*)_tmp_174_rule(p)) // '*' | '**' | '/' + (a = (Token*)_tmp_176_rule(p)) // '*' | '**' | '/' ) { D(fprintf(stderr, "%*c+ invalid_kwds[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' param ',' ('*' | '**' | '/')")); @@ -20768,13 +20882,13 @@ invalid_parameters_helper_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_parameters_helper[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default+")); - asdl_seq * _loop1_175_var; + asdl_seq * _loop1_177_var; if ( - (_loop1_175_var = _loop1_175_rule(p)) // param_with_default+ + (_loop1_177_var = _loop1_177_rule(p)) // param_with_default+ ) { D(fprintf(stderr, "%*c+ invalid_parameters_helper[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_with_default+")); - _res = _loop1_175_var; + _res = _loop1_177_var; goto done; } p->mark = _mark; @@ -20840,13 +20954,13 @@ invalid_lambda_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(lambda_slash_no_default | lambda_slash_with_default) lambda_param_maybe_default* '/'")); - asdl_seq * _loop0_177_var; - void *_tmp_176_var; + asdl_seq * _loop0_179_var; + void *_tmp_178_var; Token * a; if ( - (_tmp_176_var = _tmp_176_rule(p)) // lambda_slash_no_default | lambda_slash_with_default + (_tmp_178_var = _tmp_178_rule(p)) // lambda_slash_no_default | lambda_slash_with_default && - (_loop0_177_var = _loop0_177_rule(p)) // lambda_param_maybe_default* + (_loop0_179_var = _loop0_179_rule(p)) // lambda_param_maybe_default* && (a = _PyPegen_expect_token(p, 17)) // token='/' ) @@ -20870,7 +20984,7 @@ invalid_lambda_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default? 
lambda_param_no_default* invalid_lambda_parameters_helper lambda_param_no_default")); - asdl_seq * _loop0_178_var; + asdl_seq * _loop0_180_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings arg_ty a; @@ -20878,7 +20992,7 @@ invalid_lambda_parameters_rule(Parser *p) if ( (_opt_var = lambda_slash_no_default_rule(p), !p->error_indicator) // lambda_slash_no_default? && - (_loop0_178_var = _loop0_178_rule(p)) // lambda_param_no_default* + (_loop0_180_var = _loop0_180_rule(p)) // lambda_param_no_default* && (invalid_lambda_parameters_helper_var = invalid_lambda_parameters_helper_rule(p)) // invalid_lambda_parameters_helper && @@ -20904,18 +21018,18 @@ invalid_lambda_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* '(' ','.lambda_param+ ','? ')'")); - asdl_seq * _gather_180_var; - asdl_seq * _loop0_179_var; + asdl_seq * _gather_182_var; + asdl_seq * _loop0_181_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings Token * a; Token * b; if ( - (_loop0_179_var = _loop0_179_rule(p)) // lambda_param_no_default* + (_loop0_181_var = _loop0_181_rule(p)) // lambda_param_no_default* && (a = _PyPegen_expect_token(p, 7)) // token='(' && - (_gather_180_var = _gather_180_rule(p)) // ','.lambda_param+ + (_gather_182_var = _gather_182_rule(p)) // ','.lambda_param+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? && @@ -20942,22 +21056,22 @@ invalid_lambda_parameters_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "[(lambda_slash_no_default | lambda_slash_with_default)] lambda_param_maybe_default* '*' (',' | lambda_param_no_default) lambda_param_maybe_default* '/'")); Token * _literal; - asdl_seq * _loop0_183_var; asdl_seq * _loop0_185_var; + asdl_seq * _loop0_187_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings - void *_tmp_184_var; + void *_tmp_186_var; Token * a; if ( - (_opt_var = _tmp_182_rule(p), !p->error_indicator) // [(lambda_slash_no_default | lambda_slash_with_default)] + (_opt_var = _tmp_184_rule(p), !p->error_indicator) // [(lambda_slash_no_default | lambda_slash_with_default)] && - (_loop0_183_var = _loop0_183_rule(p)) // lambda_param_maybe_default* + (_loop0_185_var = _loop0_185_rule(p)) // lambda_param_maybe_default* && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_184_var = _tmp_184_rule(p)) // ',' | lambda_param_no_default + (_tmp_186_var = _tmp_186_rule(p)) // ',' | lambda_param_no_default && - (_loop0_185_var = _loop0_185_rule(p)) // lambda_param_maybe_default* + (_loop0_187_var = _loop0_187_rule(p)) // lambda_param_maybe_default* && (a = _PyPegen_expect_token(p, 17)) // token='/' ) @@ -20982,10 +21096,10 @@ invalid_lambda_parameters_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default+ '/' '*'")); Token * _literal; - asdl_seq * _loop1_186_var; + asdl_seq * _loop1_188_var; Token * a; if ( - (_loop1_186_var = _loop1_186_rule(p)) // lambda_param_maybe_default+ + (_loop1_188_var = _loop1_188_rule(p)) // lambda_param_maybe_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -21057,13 +21171,13 @@ invalid_lambda_parameters_helper_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_lambda_parameters_helper[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); - asdl_seq * 
_loop1_187_var; + asdl_seq * _loop1_189_var; if ( - (_loop1_187_var = _loop1_187_rule(p)) // lambda_param_with_default+ + (_loop1_189_var = _loop1_189_rule(p)) // lambda_param_with_default+ ) { D(fprintf(stderr, "%*c+ invalid_lambda_parameters_helper[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); - _res = _loop1_187_var; + _res = _loop1_189_var; goto done; } p->mark = _mark; @@ -21100,11 +21214,11 @@ invalid_lambda_star_etc_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); Token * _literal; - void *_tmp_188_var; + void *_tmp_190_var; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_188_var = _tmp_188_rule(p)) // ':' | ',' (':' | '**') + (_tmp_190_var = _tmp_190_rule(p)) // ':' | ',' (':' | '**') ) { D(fprintf(stderr, "%*c+ invalid_lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); @@ -21157,20 +21271,20 @@ invalid_lambda_star_etc_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (lambda_param_no_default | ',') lambda_param_maybe_default* '*' (lambda_param_no_default | ',')")); Token * _literal; - asdl_seq * _loop0_190_var; - void *_tmp_189_var; + asdl_seq * _loop0_192_var; void *_tmp_191_var; + void *_tmp_193_var; Token * a; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_189_var = _tmp_189_rule(p)) // lambda_param_no_default | ',' + (_tmp_191_var = _tmp_191_rule(p)) // lambda_param_no_default | ',' && - (_loop0_190_var = _loop0_190_rule(p)) // lambda_param_maybe_default* + (_loop0_192_var = _loop0_192_rule(p)) // lambda_param_maybe_default* && (a = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_191_var = _tmp_191_rule(p)) // lambda_param_no_default | ',' + (_tmp_193_var = _tmp_193_rule(p)) // lambda_param_no_default | ',' ) { D(fprintf(stderr, "%*c+ invalid_lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (lambda_param_no_default | ',') lambda_param_maybe_default* '*' (lambda_param_no_default | ',')")); @@ -21289,7 +21403,7 @@ invalid_lambda_kwds_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (a = (Token*)_tmp_192_rule(p)) // '*' | '**' | '/' + (a = (Token*)_tmp_194_rule(p)) // '*' | '**' | '/' ) { D(fprintf(stderr, "%*c+ invalid_lambda_kwds[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' lambda_param ',' ('*' | '**' | '/')")); @@ -21393,11 +21507,11 @@ invalid_with_item_rule(Parser *p) if ( (expression_var = expression_rule(p)) // expression && - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (a = expression_rule(p)) // expression && - _PyPegen_lookahead(1, _tmp_193_rule, p) + _PyPegen_lookahead(1, _tmp_195_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_with_item[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression 'as' expression &(',' | ')' | ':')")); @@ -21446,7 +21560,7 @@ invalid_for_target_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? 
&& - (_keyword = _PyPegen_expect_token(p, 642)) // token='for' + (_keyword = _PyPegen_expect_token(p, 649)) // token='for' && (a = star_expressions_rule(p)) // star_expressions ) @@ -21553,6 +21667,59 @@ invalid_group_rule(Parser *p) return _res; } +// invalid_import: 'import' dotted_name 'from' dotted_name +static void * +invalid_import_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // 'import' dotted_name 'from' dotted_name + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_import[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import' dotted_name 'from' dotted_name")); + Token * _keyword; + Token * a; + expr_ty dotted_name_var; + expr_ty dotted_name_var_1; + if ( + (a = _PyPegen_expect_token(p, 606)) // token='import' + && + (dotted_name_var = dotted_name_rule(p)) // dotted_name + && + (_keyword = _PyPegen_expect_token(p, 607)) // token='from' + && + (dotted_name_var_1 = dotted_name_rule(p)) // dotted_name + ) + { + D(fprintf(stderr, "%*c+ invalid_import[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import' dotted_name 'from' dotted_name")); + _res = RAISE_SYNTAX_ERROR_STARTING_FROM ( a , "Did you mean to use 'from ... import ...' instead?" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_import[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'import' dotted_name 'from' dotted_name")); + } + _res = NULL; + done: + p->level--; + return _res; +} + // invalid_import_from_targets: import_from_as_names ',' NEWLINE static void * invalid_import_from_targets_rule(Parser *p) @@ -21625,7 +21792,7 @@ invalid_with_stmt_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ NEWLINE")); - asdl_seq * _gather_194_var; + asdl_seq * _gather_196_var; Token * _keyword; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings @@ -21633,9 +21800,9 @@ invalid_with_stmt_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? && - (_keyword = _PyPegen_expect_token(p, 612)) // token='with' + (_keyword = _PyPegen_expect_token(p, 614)) // token='with' && - (_gather_194_var = _gather_194_rule(p)) // ','.(expression ['as' star_target])+ + (_gather_196_var = _gather_196_rule(p)) // ','.(expression ['as' star_target])+ && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -21659,7 +21826,7 @@ invalid_with_stmt_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE")); - asdl_seq * _gather_196_var; + asdl_seq * _gather_198_var; Token * _keyword; Token * _literal; Token * _literal_1; @@ -21671,11 +21838,11 @@ invalid_with_stmt_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? 
&& - (_keyword = _PyPegen_expect_token(p, 612)) // token='with' + (_keyword = _PyPegen_expect_token(p, 614)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (_gather_196_var = _gather_196_rule(p)) // ','.(expressions ['as' star_target])+ + (_gather_198_var = _gather_198_rule(p)) // ','.(expressions ['as' star_target])+ && (_opt_var_1 = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? && @@ -21725,7 +21892,7 @@ invalid_with_stmt_indent_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT")); - asdl_seq * _gather_198_var; + asdl_seq * _gather_200_var; Token * _literal; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings @@ -21734,9 +21901,9 @@ invalid_with_stmt_indent_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? && - (a = _PyPegen_expect_token(p, 612)) // token='with' + (a = _PyPegen_expect_token(p, 614)) // token='with' && - (_gather_198_var = _gather_198_rule(p)) // ','.(expression ['as' star_target])+ + (_gather_200_var = _gather_200_rule(p)) // ','.(expression ['as' star_target])+ && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -21764,7 +21931,7 @@ invalid_with_stmt_indent_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT")); - asdl_seq * _gather_200_var; + asdl_seq * _gather_202_var; Token * _literal; Token * _literal_1; Token * _literal_2; @@ -21777,11 +21944,11 @@ invalid_with_stmt_indent_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? && - (a = _PyPegen_expect_token(p, 612)) // token='with' + (a = _PyPegen_expect_token(p, 614)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (_gather_200_var = _gather_200_rule(p)) // ','.(expressions ['as' star_target])+ + (_gather_202_var = _gather_202_rule(p)) // ','.(expressions ['as' star_target])+ && (_opt_var_1 = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? 
&& @@ -21816,7 +21983,8 @@ invalid_with_stmt_indent_rule(Parser *p) // invalid_try_stmt: // | 'try' ':' NEWLINE !INDENT // | 'try' ':' block !('except' | 'finally') -// | 'try' ':' block* ((except_block+ except_star_block) | (except_star_block+ except_block)) block* +// | 'try' ':' block* except_block+ 'except' '*' expression ['as' NAME] ':' +// | 'try' ':' block* except_star_block+ 'except' [expression ['as' NAME]] ':' static void * invalid_try_stmt_rule(Parser *p) { @@ -21840,7 +22008,7 @@ invalid_try_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 618)) // token='try' + (a = _PyPegen_expect_token(p, 623)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -21872,13 +22040,13 @@ invalid_try_stmt_rule(Parser *p) Token * _literal; asdl_stmt_seq* block_var; if ( - (_keyword = _PyPegen_expect_token(p, 618)) // token='try' + (_keyword = _PyPegen_expect_token(p, 623)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (block_var = block_rule(p)) // block && - _PyPegen_lookahead(0, _tmp_202_rule, p) + _PyPegen_lookahead(0, _tmp_204_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_try_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'try' ':' block !('except' | 'finally')")); @@ -21894,31 +22062,44 @@ invalid_try_stmt_rule(Parser *p) D(fprintf(stderr, "%*c%s invalid_try_stmt[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'try' ':' block !('except' | 'finally')")); } - { // 'try' ':' block* ((except_block+ except_star_block) | (except_star_block+ except_block)) block* + { // 'try' ':' block* except_block+ 'except' '*' expression ['as' NAME] ':' if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> invalid_try_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'try' ':' block* ((except_block+ except_star_block) | (except_star_block+ except_block)) block*")); + D(fprintf(stderr, "%*c> invalid_try_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'try' ':' block* except_block+ 'except' '*' expression ['as' NAME] ':'")); Token * _keyword; Token * _literal; - asdl_seq * _loop0_203_var; + Token * _literal_1; asdl_seq * _loop0_205_var; - void *_tmp_204_var; + asdl_seq * _loop1_206_var; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + Token * a; + Token * b; + expr_ty expression_var; if ( - (_keyword = _PyPegen_expect_token(p, 618)) // token='try' + (_keyword = _PyPegen_expect_token(p, 623)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (_loop0_203_var = _loop0_203_rule(p)) // block* + (_loop0_205_var = _loop0_205_rule(p)) // block* && - (_tmp_204_var = _tmp_204_rule(p)) // (except_block+ except_star_block) | (except_star_block+ except_block) + (_loop1_206_var = _loop1_206_rule(p)) // except_block+ && - (_loop0_205_var = _loop0_205_rule(p)) // block* + (a = _PyPegen_expect_token(p, 636)) // token='except' + && + (b = _PyPegen_expect_token(p, 16)) // token='*' + && + (expression_var = expression_rule(p)) // expression + && + (_opt_var = _tmp_207_rule(p), !p->error_indicator) // ['as' NAME] + && + (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ invalid_try_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'try' ':' block* ((except_block+ except_star_block) | (except_star_block+ except_block)) block*")); - _res = RAISE_SYNTAX_ERROR ( "cannot have both 'except' and 'except*' on the same 'try'" ); + D(fprintf(stderr, "%*c+ 
invalid_try_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'try' ':' block* except_block+ 'except' '*' expression ['as' NAME] ':'")); + _res = RAISE_SYNTAX_ERROR_KNOWN_RANGE ( a , b , "cannot have both 'except' and 'except*' on the same 'try'" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; p->level--; @@ -21928,7 +22109,50 @@ invalid_try_stmt_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s invalid_try_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'try' ':' block* ((except_block+ except_star_block) | (except_star_block+ except_block)) block*")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'try' ':' block* except_block+ 'except' '*' expression ['as' NAME] ':'")); + } + { // 'try' ':' block* except_star_block+ 'except' [expression ['as' NAME]] ':' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_try_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'try' ':' block* except_star_block+ 'except' [expression ['as' NAME]] ':'")); + Token * _keyword; + Token * _literal; + Token * _literal_1; + asdl_seq * _loop0_208_var; + asdl_seq * _loop1_209_var; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + Token * a; + if ( + (_keyword = _PyPegen_expect_token(p, 623)) // token='try' + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (_loop0_208_var = _loop0_208_rule(p)) // block* + && + (_loop1_209_var = _loop1_209_rule(p)) // except_star_block+ + && + (a = _PyPegen_expect_token(p, 636)) // token='except' + && + (_opt_var = _tmp_210_rule(p), !p->error_indicator) // [expression ['as' NAME]] + && + (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' + ) + { + D(fprintf(stderr, "%*c+ invalid_try_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'try' ':' block* except_star_block+ 'except' [expression ['as' NAME]] ':'")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot have both 'except' and 'except*' on the same 'try'" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_try_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'try' ':' block* except_star_block+ 'except' [expression ['as' NAME]] ':'")); } _res = NULL; done: @@ -21970,7 +22194,7 @@ invalid_except_stmt_rule(Parser *p) expr_ty a; expr_ty expressions_var; if ( - (_keyword = _PyPegen_expect_token(p, 629)) // token='except' + (_keyword = _PyPegen_expect_token(p, 636)) // token='except' && (_opt_var = _PyPegen_expect_token(p, 16), !p->error_indicator) // '*'? && @@ -21980,7 +22204,7 @@ invalid_except_stmt_rule(Parser *p) && (expressions_var = expressions_rule(p)) // expressions && - (_opt_var_1 = _tmp_206_rule(p), !p->error_indicator) // ['as' NAME] + (_opt_var_1 = _tmp_211_rule(p), !p->error_indicator) // ['as' NAME] && (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' ) @@ -22012,13 +22236,13 @@ invalid_except_stmt_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 629)) // token='except' + (a = _PyPegen_expect_token(p, 636)) // token='except' && (_opt_var = _PyPegen_expect_token(p, 16), !p->error_indicator) // '*'? 
&& (expression_var = expression_rule(p)) // expression && - (_opt_var_1 = _tmp_207_rule(p), !p->error_indicator) // ['as' NAME] + (_opt_var_1 = _tmp_212_rule(p), !p->error_indicator) // ['as' NAME] && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -22045,7 +22269,7 @@ invalid_except_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 629)) // token='except' + (a = _PyPegen_expect_token(p, 636)) // token='except' && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -22070,14 +22294,14 @@ invalid_except_stmt_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_except_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' '*' (NEWLINE | ':')")); Token * _literal; - void *_tmp_208_var; + void *_tmp_213_var; Token * a; if ( - (a = _PyPegen_expect_token(p, 629)) // token='except' + (a = _PyPegen_expect_token(p, 636)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_208_var = _tmp_208_rule(p)) // NEWLINE | ':' + (_tmp_213_var = _tmp_213_rule(p)) // NEWLINE | ':' ) { D(fprintf(stderr, "%*c+ invalid_except_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' '*' (NEWLINE | ':')")); @@ -22123,7 +22347,7 @@ invalid_finally_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 625)) // token='finally' + (a = _PyPegen_expect_token(p, 632)) // token='finally' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -22180,11 +22404,11 @@ invalid_except_stmt_indent_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 629)) // token='except' + (a = _PyPegen_expect_token(p, 636)) // token='except' && (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_209_rule(p), !p->error_indicator) // ['as' NAME] + (_opt_var = _tmp_214_rule(p), !p->error_indicator) // ['as' NAME] && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -22216,7 +22440,7 @@ invalid_except_stmt_indent_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 629)) // token='except' + (a = _PyPegen_expect_token(p, 636)) // token='except' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -22273,13 +22497,13 @@ invalid_except_star_stmt_indent_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 629)) // token='except' + (a = _PyPegen_expect_token(p, 636)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_210_rule(p), !p->error_indicator) // ['as' NAME] + (_opt_var = _tmp_215_rule(p), !p->error_indicator) // ['as' NAME] && (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' && @@ -22515,7 +22739,7 @@ invalid_as_pattern_rule(Parser *p) if ( (or_pattern_var = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (a = _PyPegen_expect_soft_keyword(p, "_")) // soft_keyword='"_"' ) @@ -22545,7 +22769,7 @@ invalid_as_pattern_rule(Parser *p) if ( (or_pattern_var = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && _PyPegen_lookahead_with_name(0, _PyPegen_name_token, p) && @@ -22648,7 +22872,7 @@ invalid_class_argument_pattern_rule(Parser *p) asdl_pattern_seq* a; asdl_seq* 
keyword_patterns_var; if ( - (_opt_var = _tmp_211_rule(p), !p->error_indicator) // [positional_patterns ','] + (_opt_var = _tmp_216_rule(p), !p->error_indicator) // [positional_patterns ','] && (keyword_patterns_var = keyword_patterns_rule(p)) // keyword_patterns && @@ -22702,7 +22926,7 @@ invalid_if_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 634)) // token='if' + (_keyword = _PyPegen_expect_token(p, 641)) // token='if' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -22733,7 +22957,7 @@ invalid_if_stmt_rule(Parser *p) expr_ty a_1; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 634)) // token='if' + (a = _PyPegen_expect_token(p, 641)) // token='if' && (a_1 = named_expression_rule(p)) // named_expression && @@ -22789,7 +23013,7 @@ invalid_elif_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 636)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 643)) // token='elif' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -22820,7 +23044,7 @@ invalid_elif_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 636)) // token='elif' + (a = _PyPegen_expect_token(p, 643)) // token='elif' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -22874,7 +23098,7 @@ invalid_else_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 637)) // token='else' + (a = _PyPegen_expect_token(p, 644)) // token='else' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -22928,7 +23152,7 @@ invalid_while_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 639)) // token='while' + (_keyword = _PyPegen_expect_token(p, 646)) // token='while' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -22959,7 +23183,7 @@ invalid_while_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 639)) // token='while' + (a = _PyPegen_expect_token(p, 646)) // token='while' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -23021,11 +23245,11 @@ invalid_for_stmt_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? && - (_keyword = _PyPegen_expect_token(p, 642)) // token='for' + (_keyword = _PyPegen_expect_token(p, 649)) // token='for' && (star_targets_var = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 643)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in' && (star_expressions_var = star_expressions_rule(p)) // star_expressions && @@ -23062,11 +23286,11 @@ invalid_for_stmt_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? && - (a = _PyPegen_expect_token(p, 642)) // token='for' + (a = _PyPegen_expect_token(p, 649)) // token='for' && (star_targets_var = star_targets_rule(p)) // star_targets && - (_keyword = _PyPegen_expect_token(p, 643)) // token='in' + (_keyword = _PyPegen_expect_token(p, 650)) // token='in' && (star_expressions_var = star_expressions_rule(p)) // star_expressions && @@ -23132,7 +23356,7 @@ invalid_def_raw_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? 
&& - (a = _PyPegen_expect_token(p, 644)) // token='def' + (a = _PyPegen_expect_token(p, 651)) // token='def' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -23142,7 +23366,7 @@ invalid_def_raw_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (_opt_var_2 = _tmp_212_rule(p), !p->error_indicator) // ['->' expression] + (_opt_var_2 = _tmp_217_rule(p), !p->error_indicator) // ['->' expression] && (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23198,11 +23422,11 @@ invalid_class_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 646)) // token='class' + (_keyword = _PyPegen_expect_token(p, 653)) // token='class' && (name_var = _PyPegen_name_token(p)) // NAME && - (_opt_var = _tmp_213_rule(p), !p->error_indicator) // ['(' arguments? ')'] + (_opt_var = _tmp_218_rule(p), !p->error_indicator) // ['(' arguments? ')'] && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -23233,11 +23457,11 @@ invalid_class_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 646)) // token='class' + (a = _PyPegen_expect_token(p, 653)) // token='class' && (name_var = _PyPegen_name_token(p)) // NAME && - (_opt_var = _tmp_214_rule(p), !p->error_indicator) // ['(' arguments? ')'] + (_opt_var = _tmp_219_rule(p), !p->error_indicator) // ['(' arguments? ')'] && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23288,11 +23512,11 @@ invalid_double_starred_kvpairs_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_double_starred_kvpairs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.double_starred_kvpair+ ',' invalid_kvpair")); - asdl_seq * _gather_215_var; + asdl_seq * _gather_220_var; Token * _literal; void *invalid_kvpair_var; if ( - (_gather_215_var = _gather_215_rule(p)) // ','.double_starred_kvpair+ + (_gather_220_var = _gather_220_rule(p)) // ','.double_starred_kvpair+ && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && @@ -23300,7 +23524,7 @@ invalid_double_starred_kvpairs_rule(Parser *p) ) { D(fprintf(stderr, "%*c+ invalid_double_starred_kvpairs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.double_starred_kvpair+ ',' invalid_kvpair")); - _res = _PyPegen_dummy_name(p, _gather_215_var, _literal, invalid_kvpair_var); + _res = _PyPegen_dummy_name(p, _gather_220_var, _literal, invalid_kvpair_var); goto done; } p->mark = _mark; @@ -23353,7 +23577,7 @@ invalid_double_starred_kvpairs_rule(Parser *p) && (a = _PyPegen_expect_token(p, 11)) // token=':' && - _PyPegen_lookahead(1, _tmp_217_rule, p) + _PyPegen_lookahead(1, _tmp_222_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_double_starred_kvpairs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' &('}' | ',')")); @@ -23464,7 +23688,7 @@ invalid_kvpair_rule(Parser *p) && (a = _PyPegen_expect_token(p, 11)) // token=':' && - _PyPegen_lookahead(1, _tmp_218_rule, p) + _PyPegen_lookahead(1, _tmp_223_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_kvpair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' &('}' | ',')")); @@ -23486,6 +23710,59 @@ invalid_kvpair_rule(Parser *p) return _res; } +// invalid_starred_expression: '*' expression '=' expression +static void * +invalid_starred_expression_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '*' 
expression '=' expression + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_starred_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' expression '=' expression")); + Token * _literal; + Token * a; + expr_ty b; + expr_ty expression_var; + if ( + (a = _PyPegen_expect_token(p, 16)) // token='*' + && + (expression_var = expression_rule(p)) // expression + && + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + && + (b = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ invalid_starred_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' expression '=' expression")); + _res = RAISE_SYNTAX_ERROR_KNOWN_RANGE ( a , b , "cannot assign to iterable argument unpacking" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_starred_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' expression '=' expression")); + } + _res = NULL; + done: + p->level--; + return _res; +} + // _loop0_1: NEWLINE static asdl_seq * _loop0_1_rule(Parser *p) @@ -23840,7 +24117,7 @@ _tmp_6_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 531)) // token='import' + (_keyword = _PyPegen_expect_token(p, 606)) // token='import' ) { D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import'")); @@ -23859,7 +24136,7 @@ _tmp_6_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 572)) // token='from' + (_keyword = _PyPegen_expect_token(p, 607)) // token='from' ) { D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from'")); @@ -23898,7 +24175,7 @@ _tmp_7_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 644)) // token='def' + (_keyword = _PyPegen_expect_token(p, 651)) // token='def' ) { D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'")); @@ -23975,7 +24252,7 @@ _tmp_8_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 646)) // token='class' + (_keyword = _PyPegen_expect_token(p, 653)) // token='class' ) { D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'")); @@ -24033,7 +24310,7 @@ _tmp_9_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 612)) // token='with' + (_keyword = _PyPegen_expect_token(p, 614)) // token='with' ) { D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with'")); @@ -24091,7 +24368,7 @@ _tmp_10_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 642)) // token='for' + (_keyword = _PyPegen_expect_token(p, 649)) // token='for' ) { D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for'")); @@ -24320,12 +24597,12 
@@ _loop1_14_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_14[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_219_var; + void *_tmp_224_var; while ( - (_tmp_219_var = _tmp_219_rule(p)) // star_targets '=' + (_tmp_224_var = _tmp_224_rule(p)) // star_targets '=' ) { - _res = _tmp_219_var; + _res = _tmp_224_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -24503,7 +24780,7 @@ _tmp_17_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 572)) // token='from' + (_keyword = _PyPegen_expect_token(p, 607)) // token='from' && (z = expression_rule(p)) // expression ) @@ -24902,12 +25179,12 @@ _loop0_24_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_24[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); - void *_tmp_220_var; + void *_tmp_225_var; while ( - (_tmp_220_var = _tmp_220_rule(p)) // '.' | '...' + (_tmp_225_var = _tmp_225_rule(p)) // '.' | '...' ) { - _res = _tmp_220_var; + _res = _tmp_225_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -24971,12 +25248,12 @@ _loop1_25_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_25[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); - void *_tmp_221_var; + void *_tmp_226_var; while ( - (_tmp_221_var = _tmp_221_rule(p)) // '.' | '...' + (_tmp_226_var = _tmp_226_rule(p)) // '.' | '...' ) { - _res = _tmp_221_var; + _res = _tmp_226_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -25158,7 +25435,7 @@ _tmp_28_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -25325,7 +25602,7 @@ _tmp_31_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -25379,12 +25656,12 @@ _loop1_32_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_32[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('@' named_expression NEWLINE)")); - void *_tmp_222_var; + void *_tmp_227_var; while ( - (_tmp_222_var = _tmp_222_rule(p)) // '@' named_expression NEWLINE + (_tmp_227_var = _tmp_227_rule(p)) // '@' named_expression NEWLINE ) { - _res = _tmp_222_var; + _res = _tmp_227_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -27365,7 +27642,7 @@ _tmp_62_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -27412,7 +27689,7 @@ _tmp_63_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -28468,12 +28745,12 @@ _loop1_80_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_80[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' expression)")); - void *_tmp_223_var; + void *_tmp_228_var; while ( - (_tmp_223_var = 
_tmp_223_rule(p)) // ',' expression + (_tmp_228_var = _tmp_228_rule(p)) // ',' expression ) { - _res = _tmp_223_var; + _res = _tmp_228_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -28542,12 +28819,12 @@ _loop1_81_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_81[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_expression)")); - void *_tmp_224_var; + void *_tmp_229_var; while ( - (_tmp_224_var = _tmp_224_rule(p)) // ',' star_expression + (_tmp_229_var = _tmp_229_rule(p)) // ',' star_expression ) { - _res = _tmp_224_var; + _res = _tmp_229_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -28736,12 +29013,12 @@ _loop1_84_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_84[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('or' conjunction)")); - void *_tmp_225_var; + void *_tmp_230_var; while ( - (_tmp_225_var = _tmp_225_rule(p)) // 'or' conjunction + (_tmp_230_var = _tmp_230_rule(p)) // 'or' conjunction ) { - _res = _tmp_225_var; + _res = _tmp_230_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -28810,12 +29087,12 @@ _loop1_85_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_85[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('and' inversion)")); - void *_tmp_226_var; + void *_tmp_231_var; while ( - (_tmp_226_var = _tmp_226_rule(p)) // 'and' inversion + (_tmp_231_var = _tmp_231_rule(p)) // 'and' inversion ) { - _res = _tmp_226_var; + _res = _tmp_231_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -29007,7 +29284,7 @@ _loop0_89_rule(Parser *p) while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_227_rule(p)) // slice | starred_expression + (elem = _tmp_232_rule(p)) // slice | starred_expression ) { _res = elem; @@ -29073,7 +29350,7 @@ _gather_88_rule(Parser *p) void *elem; asdl_seq * seq; if ( - (elem = _tmp_227_rule(p)) // slice | starred_expression + (elem = _tmp_232_rule(p)) // slice | starred_expression && (seq = _loop0_89_rule(p)) // _loop0_89 ) @@ -30777,12 +31054,12 @@ _loop0_114_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_114[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); - void *_tmp_228_var; + void *_tmp_233_var; while ( - (_tmp_228_var = _tmp_228_rule(p)) // 'if' disjunction + (_tmp_233_var = _tmp_233_rule(p)) // 'if' disjunction ) { - _res = _tmp_228_var; + _res = _tmp_233_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -30846,12 +31123,12 @@ _loop0_115_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); - void *_tmp_229_var; + void *_tmp_234_var; while ( - (_tmp_229_var = _tmp_229_rule(p)) // 'if' disjunction + (_tmp_234_var = _tmp_234_rule(p)) // 'if' disjunction ) { - _res = _tmp_229_var; + _res = _tmp_234_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -30980,7 +31257,7 @@ _loop0_118_rule(Parser *p) while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = 
_tmp_230_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' + (elem = _tmp_235_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' ) { _res = elem; @@ -31047,7 +31324,7 @@ _gather_117_rule(Parser *p) void *elem; asdl_seq * seq; if ( - (elem = _tmp_230_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' + (elem = _tmp_235_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' && (seq = _loop0_118_rule(p)) // _loop0_118 ) @@ -31623,12 +31900,12 @@ _loop0_128_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); - void *_tmp_231_var; + void *_tmp_236_var; while ( - (_tmp_231_var = _tmp_231_rule(p)) // ',' star_target + (_tmp_236_var = _tmp_236_rule(p)) // ',' star_target ) { - _res = _tmp_231_var; + _res = _tmp_236_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -31812,12 +32089,12 @@ _loop1_131_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); - void *_tmp_232_var; + void *_tmp_237_var; while ( - (_tmp_232_var = _tmp_232_rule(p)) // ',' star_target + (_tmp_237_var = _tmp_237_rule(p)) // ',' star_target ) { - _res = _tmp_232_var; + _res = _tmp_237_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -32600,9 +32877,109 @@ _tmp_144_rule(Parser *p) return _res; } -// _tmp_145: 'True' | 'False' | 'None' +// _tmp_145: args ',' static void * _tmp_145_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // args ',' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ','")); + Token * _literal; + expr_ty args_var; + if ( + (args_var = args_rule(p)) // args + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + ) + { + D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ','")); + _res = _PyPegen_dummy_name(p, args_var, _literal); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args ','")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_146: ',' | ')' +static void * +_tmp_146_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ',' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + ) + { + D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','")); + } + { // ')' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_147: 'True' | 'False' | 'None' +static void * +_tmp_147_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32619,18 +32996,18 @@ _tmp_145_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); + D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 600)) // token='True' ) { - D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); + D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'True'")); } { // 'False' @@ -32638,18 +33015,18 @@ _tmp_145_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); + D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 602)) // token='False' ) { - D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); + D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'False'")); } { // 'None' @@ -32657,18 +33034,18 @@ _tmp_145_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); + D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 601)) // token='None' ) { - D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); + D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'None'")); } _res = NULL; @@ -32677,9 +33054,9 @@ _tmp_145_rule(Parser *p) return _res; } -// _tmp_146: NAME '=' +// _tmp_148: NAME '=' static void * -_tmp_146_rule(Parser *p) +_tmp_148_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32696,7 +33073,7 @@ _tmp_146_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME '='")); + D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME '='")); Token * _literal; expr_ty name_var; if ( @@ -32705,12 +33082,12 @@ _tmp_146_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '='")); + D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '='")); _res = _PyPegen_dummy_name(p, name_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME '='")); } _res = NULL; @@ -32719,9 +33096,9 @@ _tmp_146_rule(Parser *p) return _res; } -// _tmp_147: NAME STRING | SOFT_KEYWORD +// _tmp_149: NAME STRING | SOFT_KEYWORD static void * -_tmp_147_rule(Parser *p) +_tmp_149_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32738,7 +33115,7 @@ _tmp_147_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME STRING")); + D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME STRING")); expr_ty name_var; expr_ty string_var; if ( @@ -32747,12 +33124,12 @@ _tmp_147_rule(Parser *p) (string_var = _PyPegen_string_token(p)) // STRING ) { - D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME STRING")); + D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME STRING")); _res = _PyPegen_dummy_name(p, name_var, string_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME STRING")); } { // SOFT_KEYWORD @@ -32760,18 +33137,18 @@ _tmp_147_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD")); + D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD")); expr_ty soft_keyword_var; if ( (soft_keyword_var = _PyPegen_soft_keyword_token(p)) // SOFT_KEYWORD ) { - D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD")); + D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD")); _res = soft_keyword_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "SOFT_KEYWORD")); } _res = NULL; @@ -32780,9 +33157,9 @@ _tmp_147_rule(Parser *p) return _res; } -// _tmp_148: 'else' | ':' +// _tmp_150: 'else' | ':' static void * -_tmp_148_rule(Parser *p) +_tmp_150_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32799,18 +33176,18 @@ _tmp_148_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'")); + D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 637)) // token='else' + (_keyword = _PyPegen_expect_token(p, 644)) // token='else' ) { - D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'")); + D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'else'")); } { // ':' @@ -32818,18 +33195,18 @@ _tmp_148_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } _res = NULL; @@ -32838,9 +33215,9 @@ _tmp_148_rule(Parser *p) return _res; } -// _tmp_149: '=' | ':=' +// _tmp_151: '=' | ':=' static void * -_tmp_149_rule(Parser *p) +_tmp_151_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32857,18 +33234,18 @@ _tmp_149_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='")); + D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='")); + D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'='")); } { // ':=' @@ -32876,18 +33253,18 @@ _tmp_149_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='")); + D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 53)) // token=':=' ) { - D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='")); + D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':='")); } _res = NULL; @@ -32896,9 +33273,9 @@ _tmp_149_rule(Parser *p) return _res; } -// _tmp_150: list | tuple | genexp | 'True' | 'None' | 'False' +// _tmp_152: list | tuple | genexp | 'True' | 'None' | 'False' static void * -_tmp_150_rule(Parser *p) +_tmp_152_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32915,18 +33292,18 @@ _tmp_150_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list")); + D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list")); expr_ty list_var; if ( (list_var = list_rule(p)) // list ) { - D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list")); + D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list")); _res = list_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "list")); } { // tuple @@ -32934,18 +33311,18 @@ _tmp_150_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple")); + D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple")); expr_ty tuple_var; if ( (tuple_var = tuple_rule(p)) // tuple ) { - D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple")); + D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple")); _res = tuple_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tuple")); } { // genexp @@ -32953,18 +33330,18 @@ _tmp_150_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp")); + D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp")); expr_ty genexp_var; if ( (genexp_var = genexp_rule(p)) // genexp ) { - D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp")); + D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp")); _res = genexp_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "genexp")); } { // 'True' @@ -32972,18 +33349,18 @@ _tmp_150_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); + D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 600)) // token='True' ) { - D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); + D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'True'")); } { // 'None' @@ -32991,18 +33368,18 @@ _tmp_150_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); + D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 601)) // token='None' ) { - D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); + D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'None'")); } { // 'False' @@ -33010,18 +33387,18 @@ _tmp_150_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); + D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 602)) // token='False' ) { - D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); + D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'False'")); } _res = NULL; @@ -33030,9 +33407,9 @@ _tmp_150_rule(Parser *p) return _res; } -// _tmp_151: '=' | ':=' +// _tmp_153: '=' | ':=' static void * -_tmp_151_rule(Parser *p) +_tmp_153_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33049,18 +33426,18 @@ _tmp_151_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='")); + D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='")); + D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? 
"ERROR!" : "-", _mark, p->mark, "'='")); } { // ':=' @@ -33068,18 +33445,18 @@ _tmp_151_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='")); + D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 53)) // token=':=' ) { - D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='")); + D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':='")); } _res = NULL; @@ -33088,9 +33465,9 @@ _tmp_151_rule(Parser *p) return _res; } -// _loop0_152: star_named_expressions +// _loop0_154: star_named_expressions static asdl_seq * -_loop0_152_rule(Parser *p) +_loop0_154_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33117,7 +33494,7 @@ _loop0_152_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expressions")); + D(fprintf(stderr, "%*c> _loop0_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expressions")); asdl_expr_seq* star_named_expressions_var; while ( (star_named_expressions_var = star_named_expressions_rule(p)) // star_named_expressions @@ -33139,7 +33516,7 @@ _loop0_152_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_152[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_154[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expressions")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33152,14 +33529,14 @@ _loop0_152_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_152_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_154_type, _seq); p->level--; return _seq; } -// _loop0_153: (star_targets '=') +// _loop0_155: (star_targets '=') static asdl_seq * -_loop0_153_rule(Parser *p) +_loop0_155_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33186,13 +33563,13 @@ _loop0_153_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_233_var; + D(fprintf(stderr, "%*c> _loop0_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); + void *_tmp_238_var; while ( - (_tmp_233_var = _tmp_233_rule(p)) // star_targets '=' + (_tmp_238_var = _tmp_238_rule(p)) // star_targets '=' ) { - _res = _tmp_233_var; + _res = _tmp_238_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -33208,7 +33585,7 @@ _loop0_153_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_153[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_155[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(star_targets '=')")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33221,14 +33598,14 @@ _loop0_153_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_153_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_155_type, _seq); p->level--; return _seq; } -// _loop0_154: (star_targets '=') +// _loop0_156: (star_targets '=') static asdl_seq * -_loop0_154_rule(Parser *p) +_loop0_156_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33255,13 +33632,13 @@ _loop0_154_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_234_var; + D(fprintf(stderr, "%*c> _loop0_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); + void *_tmp_239_var; while ( - (_tmp_234_var = _tmp_234_rule(p)) // star_targets '=' + (_tmp_239_var = _tmp_239_rule(p)) // star_targets '=' ) { - _res = _tmp_234_var; + _res = _tmp_239_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -33277,7 +33654,7 @@ _loop0_154_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_154[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_156[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(star_targets '=')")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33290,14 +33667,14 @@ _loop0_154_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_154_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_156_type, _seq); p->level--; return _seq; } -// _tmp_155: yield_expr | star_expressions +// _tmp_157: yield_expr | star_expressions static void * -_tmp_155_rule(Parser *p) +_tmp_157_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33314,18 +33691,18 @@ _tmp_155_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - D(fprintf(stderr, "%*c+ _tmp_155[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_155[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "yield_expr")); } { // star_expressions @@ -33333,18 +33710,18 @@ _tmp_155_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - D(fprintf(stderr, "%*c+ _tmp_155[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_155[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); } _res = NULL; @@ -33353,9 +33730,9 @@ _tmp_155_rule(Parser *p) return _res; } -// _tmp_156: '[' | '(' | '{' +// _tmp_158: '[' | '(' | '{' static void * -_tmp_156_rule(Parser *p) +_tmp_158_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33372,18 +33749,18 @@ _tmp_156_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { - D(fprintf(stderr, "%*c+ _tmp_156[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_156[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['")); } { // '(' @@ -33391,18 +33768,18 @@ _tmp_156_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); + D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 7)) // token='(' ) { - D(fprintf(stderr, "%*c+ _tmp_156[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); + D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_156[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'('")); } { // '{' @@ -33410,18 +33787,18 @@ _tmp_156_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' ) { - D(fprintf(stderr, "%*c+ _tmp_156[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_156[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{'")); } _res = NULL; @@ -33430,9 +33807,9 @@ _tmp_156_rule(Parser *p) return _res; } -// _tmp_157: '[' | '{' +// _tmp_159: '[' | '{' static void * -_tmp_157_rule(Parser *p) +_tmp_159_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33449,18 +33826,18 @@ _tmp_157_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { - D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['")); } { // '{' @@ -33468,18 +33845,18 @@ _tmp_157_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' ) { - D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'{'")); } _res = NULL; @@ -33488,9 +33865,9 @@ _tmp_157_rule(Parser *p) return _res; } -// _tmp_158: '[' | '{' +// _tmp_160: '[' | '{' static void * -_tmp_158_rule(Parser *p) +_tmp_160_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33507,18 +33884,18 @@ _tmp_158_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c> _tmp_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { - D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c+ _tmp_160[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_160[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['")); } { // '{' @@ -33526,18 +33903,18 @@ _tmp_158_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c> _tmp_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' ) { - D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c+ _tmp_160[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_160[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{'")); } _res = NULL; @@ -33546,9 +33923,9 @@ _tmp_158_rule(Parser *p) return _res; } -// _tmp_159: slash_no_default | slash_with_default +// _tmp_161: slash_no_default | slash_with_default static void * -_tmp_159_rule(Parser *p) +_tmp_161_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33565,18 +33942,18 @@ _tmp_159_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default")); + D(fprintf(stderr, "%*c> _tmp_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default")); asdl_arg_seq* slash_no_default_var; if ( (slash_no_default_var = slash_no_default_rule(p)) // slash_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default")); + D(fprintf(stderr, "%*c+ _tmp_161[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default")); _res = slash_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_161[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "slash_no_default")); } { // slash_with_default @@ -33584,18 +33961,18 @@ _tmp_159_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c> _tmp_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); SlashWithDefault* slash_with_default_var; if ( (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default ) { - D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c+ _tmp_161[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); _res = slash_with_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_161[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_with_default")); } _res = NULL; @@ -33604,9 +33981,9 @@ _tmp_159_rule(Parser *p) return _res; } -// _loop0_160: param_maybe_default +// _loop0_162: param_maybe_default static asdl_seq * -_loop0_160_rule(Parser *p) +_loop0_162_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33633,7 +34010,7 @@ _loop0_160_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_162[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -33655,7 +34032,7 @@ _loop0_160_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_160[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_162[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33668,14 +34045,14 @@ _loop0_160_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_160_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_162_type, _seq); p->level--; return _seq; } -// _loop0_161: param_no_default +// _loop0_163: param_no_default static asdl_seq * -_loop0_161_rule(Parser *p) +_loop0_163_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33702,7 +34079,7 @@ _loop0_161_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop0_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -33724,7 +34101,7 @@ _loop0_161_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_161[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_163[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33737,14 +34114,14 @@ _loop0_161_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_161_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_163_type, _seq); p->level--; return _seq; } -// _loop0_162: param_no_default +// _loop0_164: param_no_default static asdl_seq * -_loop0_162_rule(Parser *p) +_loop0_164_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33771,7 +34148,7 @@ _loop0_162_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_162[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop0_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -33793,7 +34170,7 @@ _loop0_162_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_162[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_164[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33806,14 +34183,14 @@ _loop0_162_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_162_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_164_type, _seq); p->level--; return _seq; } -// _loop1_163: param_no_default +// _loop1_165: param_no_default static asdl_seq * -_loop1_163_rule(Parser *p) +_loop1_165_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33840,7 +34217,7 @@ _loop1_163_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop1_165[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -33862,7 +34239,7 @@ _loop1_163_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_163[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_165[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } if (_n == 0 || p->error_indicator) { @@ -33880,14 +34257,14 @@ _loop1_163_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_163_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_165_type, _seq); p->level--; return _seq; } -// _tmp_164: slash_no_default | slash_with_default +// _tmp_166: slash_no_default | slash_with_default static void * -_tmp_164_rule(Parser *p) +_tmp_166_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33904,18 +34281,18 @@ _tmp_164_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default")); + D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default")); asdl_arg_seq* slash_no_default_var; if ( (slash_no_default_var = slash_no_default_rule(p)) // slash_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_164[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default")); + D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default")); _res = slash_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_164[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_no_default")); } { // slash_with_default @@ -33923,18 +34300,18 @@ _tmp_164_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); SlashWithDefault* slash_with_default_var; if ( (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default ) { - D(fprintf(stderr, "%*c+ _tmp_164[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); _res = slash_with_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_164[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_with_default")); } _res = NULL; @@ -33943,9 +34320,9 @@ _tmp_164_rule(Parser *p) return _res; } -// _loop0_165: param_maybe_default +// _loop0_167: param_maybe_default static asdl_seq * -_loop0_165_rule(Parser *p) +_loop0_167_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33972,7 +34349,7 @@ _loop0_165_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_165[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_167[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -33994,7 +34371,7 @@ _loop0_165_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_165[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_167[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -34007,14 +34384,14 @@ _loop0_165_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_165_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_167_type, _seq); p->level--; return _seq; } -// _tmp_166: ',' | param_no_default +// _tmp_168: ',' | param_no_default static void * -_tmp_166_rule(Parser *p) +_tmp_168_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34031,18 +34408,18 @@ _tmp_166_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_168[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_168[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } { // param_no_default @@ -34050,18 +34427,18 @@ _tmp_166_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _tmp_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; if ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_168[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); _res = param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_168[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } _res = NULL; @@ -34070,9 +34447,9 @@ _tmp_166_rule(Parser *p) return _res; } -// _loop0_167: param_maybe_default +// _loop0_169: param_maybe_default static asdl_seq * -_loop0_167_rule(Parser *p) +_loop0_169_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34099,7 +34476,7 @@ _loop0_167_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_167[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_169[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -34121,7 +34498,7 @@ _loop0_167_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_167[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_169[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -34134,14 +34511,14 @@ _loop0_167_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_167_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_169_type, _seq); p->level--; return _seq; } -// _loop1_168: param_maybe_default +// _loop1_170: param_maybe_default static asdl_seq * -_loop1_168_rule(Parser *p) +_loop1_170_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34168,7 +34545,7 @@ _loop1_168_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop1_170[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -34190,7 +34567,7 @@ _loop1_168_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_168[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_170[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default")); } if (_n == 0 || p->error_indicator) { @@ -34208,14 +34585,14 @@ _loop1_168_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_168_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_170_type, _seq); p->level--; return _seq; } -// _tmp_169: ')' | ',' +// _tmp_171: ')' | ',' static void * -_tmp_169_rule(Parser *p) +_tmp_171_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34232,18 +34609,18 @@ _tmp_169_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_169[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_169[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_169[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // ',' @@ -34251,18 +34628,18 @@ _tmp_169_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_169[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_169[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_169[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','")); } _res = NULL; @@ -34271,9 +34648,9 @@ _tmp_169_rule(Parser *p) return _res; } -// _tmp_170: ')' | ',' (')' | '**') +// _tmp_172: ')' | ',' (')' | '**') static void * -_tmp_170_rule(Parser *p) +_tmp_172_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34290,18 +34667,18 @@ _tmp_170_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_170[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_172[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_170[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_172[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_170[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_172[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // ',' (')' | '**') @@ -34309,21 +34686,21 @@ _tmp_170_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_170[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); + D(fprintf(stderr, "%*c> _tmp_172[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); Token * _literal; - void *_tmp_235_var; + void *_tmp_240_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_235_var = _tmp_235_rule(p)) // ')' | '**' + (_tmp_240_var = _tmp_240_rule(p)) // ')' | '**' ) { - D(fprintf(stderr, "%*c+ _tmp_170[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); - _res = _PyPegen_dummy_name(p, _literal, _tmp_235_var); + D(fprintf(stderr, "%*c+ _tmp_172[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); + _res = _PyPegen_dummy_name(p, _literal, _tmp_240_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_170[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_172[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (')' | '**')")); } _res = NULL; @@ -34332,9 +34709,9 @@ _tmp_170_rule(Parser *p) return _res; } -// _tmp_171: param_no_default | ',' +// _tmp_173: param_no_default | ',' static void * -_tmp_171_rule(Parser *p) +_tmp_173_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34351,18 +34728,18 @@ _tmp_171_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; if ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); _res = param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } { // ',' @@ -34370,18 +34747,18 @@ _tmp_171_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } _res = NULL; @@ -34390,9 +34767,9 @@ _tmp_171_rule(Parser *p) return _res; } -// _loop0_172: param_maybe_default +// _loop0_174: param_maybe_default static asdl_seq * -_loop0_172_rule(Parser *p) +_loop0_174_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34419,7 +34796,7 @@ _loop0_172_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_172[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_174[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -34441,7 +34818,7 @@ _loop0_172_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_172[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_174[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -34454,14 +34831,14 @@ _loop0_172_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_172_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_174_type, _seq); p->level--; return _seq; } -// _tmp_173: param_no_default | ',' +// _tmp_175: param_no_default | ',' static void * -_tmp_173_rule(Parser *p) +_tmp_175_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34478,18 +34855,18 @@ _tmp_173_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _tmp_175[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; if ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_175[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); _res = param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_175[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } { // ',' @@ -34497,18 +34874,18 @@ _tmp_173_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_175[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_175[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_175[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } _res = NULL; @@ -34517,9 +34894,9 @@ _tmp_173_rule(Parser *p) return _res; } -// _tmp_174: '*' | '**' | '/' +// _tmp_176: '*' | '**' | '/' static void * -_tmp_174_rule(Parser *p) +_tmp_176_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34536,18 +34913,18 @@ _tmp_174_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_174[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'")); + D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' ) { - D(fprintf(stderr, "%*c+ _tmp_174[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'")); + D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_174[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*'")); } { // '**' @@ -34555,18 +34932,18 @@ _tmp_174_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_174[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_174[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_174[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'")); } { // '/' @@ -34574,18 +34951,18 @@ _tmp_174_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_174[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'")); + D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 17)) // token='/' ) { - D(fprintf(stderr, "%*c+ _tmp_174[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'")); + D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_174[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'/'")); } _res = NULL; @@ -34594,9 +34971,9 @@ _tmp_174_rule(Parser *p) return _res; } -// _loop1_175: param_with_default +// _loop1_177: param_with_default static asdl_seq * -_loop1_175_rule(Parser *p) +_loop1_177_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34623,7 +35000,7 @@ _loop1_175_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_175[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + D(fprintf(stderr, "%*c> _loop1_177[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -34645,7 +35022,7 @@ _loop1_175_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_175[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_177[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -34663,14 +35040,14 @@ _loop1_175_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_175_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_177_type, _seq); p->level--; return _seq; } -// _tmp_176: lambda_slash_no_default | lambda_slash_with_default +// _tmp_178: lambda_slash_no_default | lambda_slash_with_default static void * -_tmp_176_rule(Parser *p) +_tmp_178_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34687,18 +35064,18 @@ _tmp_176_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); + D(fprintf(stderr, "%*c> _tmp_178[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); asdl_arg_seq* lambda_slash_no_default_var; if ( (lambda_slash_no_default_var = lambda_slash_no_default_rule(p)) // lambda_slash_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); + D(fprintf(stderr, "%*c+ _tmp_178[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); _res = lambda_slash_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_178[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_slash_no_default")); } { // lambda_slash_with_default @@ -34706,18 +35083,18 @@ _tmp_176_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + D(fprintf(stderr, "%*c> _tmp_178[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); SlashWithDefault* lambda_slash_with_default_var; if ( (lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default ) { - D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + D(fprintf(stderr, "%*c+ _tmp_178[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); _res = lambda_slash_with_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_178[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_with_default")); } _res = NULL; @@ -34726,9 +35103,9 @@ _tmp_176_rule(Parser *p) return _res; } -// _loop0_177: lambda_param_maybe_default +// _loop0_179: lambda_param_maybe_default static asdl_seq * -_loop0_177_rule(Parser *p) +_loop0_179_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34755,7 +35132,7 @@ _loop0_177_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_177[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_179[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -34777,7 +35154,7 @@ _loop0_177_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_177[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_179[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -34790,14 +35167,14 @@ _loop0_177_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_177_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_179_type, _seq); p->level--; return _seq; } -// _loop0_178: lambda_param_no_default +// _loop0_180: lambda_param_no_default static asdl_seq * -_loop0_178_rule(Parser *p) +_loop0_180_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34824,7 +35201,7 @@ _loop0_178_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_178[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _loop0_180[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -34846,7 +35223,7 @@ _loop0_178_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_178[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_180[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -34859,14 +35236,14 @@ _loop0_178_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_178_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_180_type, _seq); p->level--; return _seq; } -// _loop0_179: lambda_param_no_default +// _loop0_181: lambda_param_no_default static asdl_seq * -_loop0_179_rule(Parser *p) +_loop0_181_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34893,7 +35270,7 @@ _loop0_179_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_179[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _loop0_181[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -34915,7 +35292,7 @@ _loop0_179_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_179[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_181[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -34928,14 +35305,14 @@ _loop0_179_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_179_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_181_type, _seq); p->level--; return _seq; } -// _loop0_181: ',' lambda_param +// _loop0_183: ',' lambda_param static asdl_seq * -_loop0_181_rule(Parser *p) +_loop0_183_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34962,7 +35339,7 @@ _loop0_181_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_181[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' lambda_param")); + D(fprintf(stderr, "%*c> _loop0_183[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' lambda_param")); Token * _literal; arg_ty elem; while ( @@ -34993,7 +35370,7 @@ _loop0_181_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_181[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_183[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' lambda_param")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35006,14 +35383,14 @@ _loop0_181_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_181_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_183_type, _seq); p->level--; return _seq; } -// _gather_180: lambda_param _loop0_181 +// _gather_182: lambda_param _loop0_183 static asdl_seq * -_gather_180_rule(Parser *p) +_gather_182_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35025,27 +35402,27 @@ _gather_180_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // lambda_param _loop0_181 + { // lambda_param _loop0_183 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_180[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_181")); + D(fprintf(stderr, "%*c> _gather_182[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_183")); arg_ty elem; asdl_seq * seq; if ( (elem = lambda_param_rule(p)) // lambda_param && - (seq = _loop0_181_rule(p)) // _loop0_181 + (seq = _loop0_183_rule(p)) // _loop0_183 ) { - D(fprintf(stderr, "%*c+ _gather_180[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_181")); + D(fprintf(stderr, "%*c+ _gather_182[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_183")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_180[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param _loop0_181")); + D(fprintf(stderr, "%*c%s _gather_182[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param _loop0_183")); } _res = NULL; done: @@ -35053,9 +35430,9 @@ _gather_180_rule(Parser *p) return _res; } -// _tmp_182: lambda_slash_no_default | lambda_slash_with_default +// _tmp_184: lambda_slash_no_default | lambda_slash_with_default static void * -_tmp_182_rule(Parser *p) +_tmp_184_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35072,18 +35449,18 @@ _tmp_182_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_182[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); + D(fprintf(stderr, "%*c> _tmp_184[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); asdl_arg_seq* lambda_slash_no_default_var; if ( (lambda_slash_no_default_var = lambda_slash_no_default_rule(p)) // lambda_slash_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_182[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); + D(fprintf(stderr, "%*c+ _tmp_184[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); _res = lambda_slash_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_182[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_184[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_slash_no_default")); } { // lambda_slash_with_default @@ -35091,18 +35468,18 @@ _tmp_182_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_182[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + D(fprintf(stderr, "%*c> _tmp_184[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); SlashWithDefault* lambda_slash_with_default_var; if ( (lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default ) { - D(fprintf(stderr, "%*c+ _tmp_182[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + D(fprintf(stderr, "%*c+ _tmp_184[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); _res = lambda_slash_with_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_182[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_184[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_with_default")); } _res = NULL; @@ -35111,9 +35488,9 @@ _tmp_182_rule(Parser *p) return _res; } -// _loop0_183: lambda_param_maybe_default +// _loop0_185: lambda_param_maybe_default static asdl_seq * -_loop0_183_rule(Parser *p) +_loop0_185_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35140,7 +35517,7 @@ _loop0_183_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_183[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_185[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -35162,7 +35539,7 @@ _loop0_183_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_183[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_185[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35175,14 +35552,14 @@ _loop0_183_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_183_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_185_type, _seq); p->level--; return _seq; } -// _tmp_184: ',' | lambda_param_no_default +// _tmp_186: ',' | lambda_param_no_default static void * -_tmp_184_rule(Parser *p) +_tmp_186_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35199,18 +35576,18 @@ _tmp_184_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_184[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_186[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_184[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_186[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_184[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_186[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','")); } { // lambda_param_no_default @@ -35218,18 +35595,18 @@ _tmp_184_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_184[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _tmp_186[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; if ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_184[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_186[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); _res = lambda_param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_184[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_186[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } _res = NULL; @@ -35238,9 +35615,9 @@ _tmp_184_rule(Parser *p) return _res; } -// _loop0_185: lambda_param_maybe_default +// _loop0_187: lambda_param_maybe_default static asdl_seq * -_loop0_185_rule(Parser *p) +_loop0_187_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35267,7 +35644,7 @@ _loop0_185_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_185[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_187[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -35289,7 +35666,7 @@ _loop0_185_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_185[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_187[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35302,14 +35679,14 @@ _loop0_185_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_185_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_187_type, _seq); p->level--; return _seq; } -// _loop1_186: lambda_param_maybe_default +// _loop1_188: lambda_param_maybe_default static asdl_seq * -_loop1_186_rule(Parser *p) +_loop1_188_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35336,7 +35713,7 @@ _loop1_186_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_186[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop1_188[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -35358,7 +35735,7 @@ _loop1_186_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_186[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_188[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_maybe_default")); } if (_n == 0 || p->error_indicator) { @@ -35376,14 +35753,14 @@ _loop1_186_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_186_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_188_type, _seq); p->level--; return _seq; } -// _loop1_187: lambda_param_with_default +// _loop1_189: lambda_param_with_default static asdl_seq * -_loop1_187_rule(Parser *p) +_loop1_189_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35410,7 +35787,7 @@ _loop1_187_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_187[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + D(fprintf(stderr, "%*c> _loop1_189[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -35432,7 +35809,7 @@ _loop1_187_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_187[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_189[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -35450,14 +35827,14 @@ _loop1_187_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_187_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_189_type, _seq); p->level--; return _seq; } -// _tmp_188: ':' | ',' (':' | '**') +// _tmp_190: ':' | ',' (':' | '**') static void * -_tmp_188_rule(Parser *p) +_tmp_190_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35474,18 +35851,18 @@ _tmp_188_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_188[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_190[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_188[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_190[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_188[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_190[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "':'")); } { // ',' (':' | '**') @@ -35493,21 +35870,21 @@ _tmp_188_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_188[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); + D(fprintf(stderr, "%*c> _tmp_190[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); Token * _literal; - void *_tmp_236_var; + void *_tmp_241_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_236_var = _tmp_236_rule(p)) // ':' | '**' + (_tmp_241_var = _tmp_241_rule(p)) // ':' | '**' ) { - D(fprintf(stderr, "%*c+ _tmp_188[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); - _res = _PyPegen_dummy_name(p, _literal, _tmp_236_var); + D(fprintf(stderr, "%*c+ _tmp_190[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); + _res = _PyPegen_dummy_name(p, _literal, _tmp_241_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_188[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_190[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (':' | '**')")); } _res = NULL; @@ -35516,9 +35893,9 @@ _tmp_188_rule(Parser *p) return _res; } -// _tmp_189: lambda_param_no_default | ',' +// _tmp_191: lambda_param_no_default | ',' static void * -_tmp_189_rule(Parser *p) +_tmp_191_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35535,18 +35912,18 @@ _tmp_189_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_189[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; if ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_189[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); _res = lambda_param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_189[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } { // ',' @@ -35554,18 +35931,18 @@ _tmp_189_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_189[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_189[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_189[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','")); } _res = NULL; @@ -35574,9 +35951,9 @@ _tmp_189_rule(Parser *p) return _res; } -// _loop0_190: lambda_param_maybe_default +// _loop0_192: lambda_param_maybe_default static asdl_seq * -_loop0_190_rule(Parser *p) +_loop0_192_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35603,7 +35980,7 @@ _loop0_190_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_190[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_192[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -35625,7 +36002,7 @@ _loop0_190_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_190[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_192[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35638,14 +36015,14 @@ _loop0_190_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_190_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_192_type, _seq); p->level--; return _seq; } -// _tmp_191: lambda_param_no_default | ',' +// _tmp_193: lambda_param_no_default | ',' static void * -_tmp_191_rule(Parser *p) +_tmp_193_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35662,18 +36039,18 @@ _tmp_191_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _tmp_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; if ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_193[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); _res = lambda_param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_193[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } { // ',' @@ -35681,18 +36058,18 @@ _tmp_191_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_193[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_193[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','")); } _res = NULL; @@ -35701,9 +36078,9 @@ _tmp_191_rule(Parser *p) return _res; } -// _tmp_192: '*' | '**' | '/' +// _tmp_194: '*' | '**' | '/' static void * -_tmp_192_rule(Parser *p) +_tmp_194_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35720,18 +36097,18 @@ _tmp_192_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_192[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'")); + D(fprintf(stderr, "%*c> _tmp_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' ) { - D(fprintf(stderr, "%*c+ _tmp_192[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'")); + D(fprintf(stderr, "%*c+ _tmp_194[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_192[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_194[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*'")); } { // '**' @@ -35739,18 +36116,18 @@ _tmp_192_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_192[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_192[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_194[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_192[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_194[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'")); } { // '/' @@ -35758,18 +36135,18 @@ _tmp_192_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_192[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'")); + D(fprintf(stderr, "%*c> _tmp_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 17)) // token='/' ) { - D(fprintf(stderr, "%*c+ _tmp_192[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'")); + D(fprintf(stderr, "%*c+ _tmp_194[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_192[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_194[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'/'")); } _res = NULL; @@ -35778,9 +36155,9 @@ _tmp_192_rule(Parser *p) return _res; } -// _tmp_193: ',' | ')' | ':' +// _tmp_195: ',' | ')' | ':' static void * -_tmp_193_rule(Parser *p) +_tmp_195_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35797,18 +36174,18 @@ _tmp_193_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_193[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_195[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_193[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_195[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } { // ')' @@ -35816,18 +36193,18 @@ _tmp_193_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_193[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_195[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_193[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_195[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // ':' @@ -35835,18 +36212,18 @@ _tmp_193_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_193[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_195[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_193[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_195[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "':'")); } _res = NULL; @@ -35855,9 +36232,9 @@ _tmp_193_rule(Parser *p) return _res; } -// _loop0_195: ',' (expression ['as' star_target]) +// _loop0_197: ',' (expression ['as' star_target]) static asdl_seq * -_loop0_195_rule(Parser *p) +_loop0_197_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35884,13 +36261,13 @@ _loop0_195_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])")); + D(fprintf(stderr, "%*c> _loop0_197[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])")); Token * _literal; void *elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_237_rule(p)) // expression ['as' star_target] + (elem = _tmp_242_rule(p)) // expression ['as' star_target] ) { _res = elem; @@ -35915,7 +36292,7 @@ _loop0_195_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_195[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_197[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expression ['as' star_target])")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35928,14 +36305,14 @@ _loop0_195_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_195_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_197_type, _seq); p->level--; return _seq; } -// _gather_194: (expression ['as' star_target]) _loop0_195 +// _gather_196: (expression ['as' star_target]) _loop0_197 static asdl_seq * -_gather_194_rule(Parser *p) +_gather_196_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35947,27 +36324,27 @@ _gather_194_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // (expression ['as' star_target]) _loop0_195 + { // (expression ['as' star_target]) _loop0_197 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_195")); + D(fprintf(stderr, "%*c> _gather_196[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_197")); void *elem; asdl_seq * seq; if ( - (elem = _tmp_237_rule(p)) // expression ['as' star_target] + (elem = _tmp_242_rule(p)) // expression ['as' star_target] && - (seq = _loop0_195_rule(p)) // _loop0_195 + (seq = _loop0_197_rule(p)) // _loop0_197 ) { - D(fprintf(stderr, "%*c+ _gather_194[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_195")); + D(fprintf(stderr, "%*c+ _gather_196[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_197")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_194[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_195")); + D(fprintf(stderr, "%*c%s _gather_196[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_197")); } _res = NULL; done: @@ -35975,9 +36352,9 @@ _gather_194_rule(Parser *p) return _res; } -// _loop0_197: ',' (expressions ['as' star_target]) +// _loop0_199: ',' (expressions ['as' star_target]) static asdl_seq * -_loop0_197_rule(Parser *p) +_loop0_199_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36004,13 +36381,13 @@ _loop0_197_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_197[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])")); + D(fprintf(stderr, "%*c> _loop0_199[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])")); Token * _literal; void *elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_238_rule(p)) // expressions ['as' star_target] + (elem = _tmp_243_rule(p)) // expressions ['as' star_target] ) { _res = elem; @@ -36035,7 +36412,7 @@ _loop0_197_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_197[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_199[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expressions ['as' star_target])")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -36048,14 +36425,14 @@ _loop0_197_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_197_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_199_type, _seq); p->level--; return _seq; } -// _gather_196: (expressions ['as' star_target]) _loop0_197 +// _gather_198: (expressions ['as' star_target]) _loop0_199 static asdl_seq * -_gather_196_rule(Parser *p) +_gather_198_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36067,27 +36444,27 @@ _gather_196_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // (expressions ['as' star_target]) _loop0_197 + { // (expressions ['as' star_target]) _loop0_199 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_196[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_197")); + D(fprintf(stderr, "%*c> _gather_198[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_199")); void *elem; asdl_seq * seq; if ( - (elem = _tmp_238_rule(p)) // expressions ['as' star_target] + (elem = _tmp_243_rule(p)) // expressions ['as' star_target] && - (seq = _loop0_197_rule(p)) // _loop0_197 + (seq = _loop0_199_rule(p)) // _loop0_199 ) { - D(fprintf(stderr, "%*c+ _gather_196[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_197")); + D(fprintf(stderr, "%*c+ _gather_198[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_199")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_196[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_197")); + D(fprintf(stderr, "%*c%s _gather_198[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_199")); } _res = NULL; done: @@ -36095,9 +36472,9 @@ _gather_196_rule(Parser *p) return _res; } -// _loop0_199: ',' (expression ['as' star_target]) +// _loop0_201: ',' (expression ['as' star_target]) static asdl_seq * -_loop0_199_rule(Parser *p) +_loop0_201_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36124,13 +36501,13 @@ _loop0_199_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_199[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])")); + D(fprintf(stderr, "%*c> _loop0_201[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])")); Token * _literal; void *elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_239_rule(p)) // expression ['as' star_target] + (elem = _tmp_244_rule(p)) // expression ['as' star_target] ) { _res = elem; @@ -36155,7 +36532,7 @@ _loop0_199_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_199[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_201[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expression ['as' star_target])")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -36168,14 +36545,14 @@ _loop0_199_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_199_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_201_type, _seq); p->level--; return _seq; } -// _gather_198: (expression ['as' star_target]) _loop0_199 +// _gather_200: (expression ['as' star_target]) _loop0_201 static asdl_seq * -_gather_198_rule(Parser *p) +_gather_200_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36187,27 +36564,27 @@ _gather_198_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // (expression ['as' star_target]) _loop0_199 + { // (expression ['as' star_target]) _loop0_201 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_198[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_199")); + D(fprintf(stderr, "%*c> _gather_200[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_201")); void *elem; asdl_seq * seq; if ( - (elem = _tmp_239_rule(p)) // expression ['as' star_target] + (elem = _tmp_244_rule(p)) // expression ['as' star_target] && - (seq = _loop0_199_rule(p)) // _loop0_199 + (seq = _loop0_201_rule(p)) // _loop0_201 ) { - D(fprintf(stderr, "%*c+ _gather_198[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_199")); + D(fprintf(stderr, "%*c+ _gather_200[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_201")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_198[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_199")); + D(fprintf(stderr, "%*c%s _gather_200[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_201")); } _res = NULL; done: @@ -36215,9 +36592,9 @@ _gather_198_rule(Parser *p) return _res; } -// _loop0_201: ',' (expressions ['as' star_target]) +// _loop0_203: ',' (expressions ['as' star_target]) static asdl_seq * -_loop0_201_rule(Parser *p) +_loop0_203_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36244,13 +36621,13 @@ _loop0_201_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_201[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])")); + D(fprintf(stderr, "%*c> _loop0_203[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])")); Token * _literal; void *elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_240_rule(p)) // expressions ['as' star_target] + (elem = _tmp_245_rule(p)) // expressions ['as' star_target] ) { _res = elem; @@ -36275,7 +36652,7 @@ _loop0_201_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_201[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_203[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expressions ['as' star_target])")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -36288,14 +36665,14 @@ _loop0_201_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_201_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_203_type, _seq); p->level--; return _seq; } -// _gather_200: (expressions ['as' star_target]) _loop0_201 +// _gather_202: (expressions ['as' star_target]) _loop0_203 static asdl_seq * -_gather_200_rule(Parser *p) +_gather_202_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36307,27 +36684,27 @@ _gather_200_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // (expressions ['as' star_target]) _loop0_201 + { // (expressions ['as' star_target]) _loop0_203 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_200[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_201")); + D(fprintf(stderr, "%*c> _gather_202[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_203")); void *elem; asdl_seq * seq; if ( - (elem = _tmp_240_rule(p)) // expressions ['as' star_target] + (elem = _tmp_245_rule(p)) // expressions ['as' star_target] && - (seq = _loop0_201_rule(p)) // _loop0_201 + (seq = _loop0_203_rule(p)) // _loop0_203 ) { - D(fprintf(stderr, "%*c+ _gather_200[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_201")); + D(fprintf(stderr, "%*c+ _gather_202[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_203")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_200[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_201")); + D(fprintf(stderr, "%*c%s _gather_202[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_203")); } _res = NULL; done: @@ -36335,9 +36712,9 @@ _gather_200_rule(Parser *p) return _res; } -// _tmp_202: 'except' | 'finally' +// _tmp_204: 'except' | 'finally' static void * -_tmp_202_rule(Parser *p) +_tmp_204_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36354,18 +36731,18 @@ _tmp_202_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_202[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'")); + D(fprintf(stderr, "%*c> _tmp_204[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 629)) // token='except' + (_keyword = _PyPegen_expect_token(p, 636)) // token='except' ) { - D(fprintf(stderr, "%*c+ _tmp_202[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'")); + D(fprintf(stderr, "%*c+ _tmp_204[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_202[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_204[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except'")); } { // 'finally' @@ -36373,18 +36750,18 @@ _tmp_202_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_202[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'")); + D(fprintf(stderr, "%*c> _tmp_204[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 625)) // token='finally' + (_keyword = _PyPegen_expect_token(p, 632)) // token='finally' ) { - D(fprintf(stderr, "%*c+ _tmp_202[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally'")); + D(fprintf(stderr, "%*c+ _tmp_204[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_202[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_204[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'finally'")); } _res = NULL; @@ -36393,9 +36770,9 @@ _tmp_202_rule(Parser *p) return _res; } -// _loop0_203: block +// _loop0_205: block static asdl_seq * -_loop0_203_rule(Parser *p) +_loop0_205_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36422,7 +36799,7 @@ _loop0_203_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_203[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block")); + D(fprintf(stderr, "%*c> _loop0_205[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block")); asdl_stmt_seq* block_var; while ( (block_var = block_rule(p)) // block @@ -36444,7 +36821,7 @@ _loop0_203_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_203[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_205[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "block")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -36457,14 +36834,14 @@ _loop0_203_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_203_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_205_type, _seq); p->level--; return _seq; } -// _tmp_204: (except_block+ except_star_block) | (except_star_block+ except_block) -static void * -_tmp_204_rule(Parser *p) +// _loop1_206: except_block +static asdl_seq * +_loop1_206_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36474,45 +36851,103 @@ _tmp_204_rule(Parser *p) p->level--; return NULL; } - void * _res = NULL; + void *_res = NULL; int _mark = p->mark; - { // (except_block+ except_star_block) + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + Py_ssize_t _children_capacity = 1; + Py_ssize_t _n = 0; + { // except_block if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_204[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(except_block+ except_star_block)")); - void *_tmp_241_var; - if ( - (_tmp_241_var = _tmp_241_rule(p)) // except_block+ except_star_block + D(fprintf(stderr, "%*c> _loop1_206[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block")); + excepthandler_ty except_block_var; + while ( + (except_block_var = except_block_rule(p)) // except_block ) { - D(fprintf(stderr, "%*c+ _tmp_204[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(except_block+ except_star_block)")); - _res = _tmp_241_var; - goto done; + _res = except_block_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_204[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(except_block+ except_star_block)")); + D(fprintf(stderr, "%*c%s _loop1_206[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "except_block")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + p->level--; + return NULL; } - { // (except_star_block+ except_block) + asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_206_type, _seq); + p->level--; + return _seq; +} + +// _tmp_207: 'as' NAME +static void * +_tmp_207_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // 'as' NAME if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_204[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(except_star_block+ except_block)")); - void *_tmp_242_var; + D(fprintf(stderr, "%*c> _tmp_207[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + Token * _keyword; + expr_ty name_var; if ( - (_tmp_242_var = _tmp_242_rule(p)) // except_star_block+ except_block + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' + && + (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_204[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(except_star_block+ except_block)")); - _res = _tmp_242_var; + D(fprintf(stderr, "%*c+ _tmp_207[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_204[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(except_star_block+ except_block)")); + D(fprintf(stderr, "%*c%s _tmp_207[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; done: @@ -36520,9 +36955,9 @@ _tmp_204_rule(Parser *p) return _res; } -// _loop0_205: block +// _loop0_208: block static asdl_seq * -_loop0_205_rule(Parser *p) +_loop0_208_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36549,7 +36984,7 @@ _loop0_205_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_205[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block")); + D(fprintf(stderr, "%*c> _loop0_208[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block")); asdl_stmt_seq* block_var; while ( (block_var = block_rule(p)) // block @@ -36571,7 +37006,7 @@ _loop0_205_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_205[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_208[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "block")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -36584,14 +37019,131 @@ _loop0_205_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_205_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_208_type, _seq); p->level--; return _seq; } -// _tmp_206: 'as' NAME +// _loop1_209: except_star_block +static asdl_seq * +_loop1_209_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + Py_ssize_t _children_capacity = 1; + Py_ssize_t _n = 0; + { // except_star_block + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_209[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block")); + excepthandler_ty except_star_block_var; + while ( + (except_star_block_var = except_star_block_rule(p)) // except_star_block + ) + { + _res = except_star_block_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_209[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "except_star_block")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + p->level--; + return NULL; + } + asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_209_type, _seq); + p->level--; + return _seq; +} + +// _tmp_210: expression ['as' NAME] static void * -_tmp_206_rule(Parser *p) +_tmp_210_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // expression ['as' NAME] + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_210[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]")); + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + expr_ty expression_var; + if ( + (expression_var = expression_rule(p)) // expression + && + (_opt_var = _tmp_246_rule(p), !p->error_indicator) // ['as' NAME] + ) + { + D(fprintf(stderr, "%*c+ _tmp_210[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]")); + _res = _PyPegen_dummy_name(p, expression_var, _opt_var); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_210[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ['as' NAME]")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_211: 'as' NAME +static void * +_tmp_211_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36608,21 +37160,21 @@ _tmp_206_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_206[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_211[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_206[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_211[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_206[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_211[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -36631,9 +37183,9 @@ _tmp_206_rule(Parser *p) return _res; } -// _tmp_207: 'as' NAME +// _tmp_212: 'as' NAME static void * -_tmp_207_rule(Parser *p) +_tmp_212_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36650,21 +37202,21 @@ _tmp_207_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_207[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_212[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_207[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_212[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_207[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_212[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -36673,9 +37225,9 @@ _tmp_207_rule(Parser *p) return _res; } -// _tmp_208: NEWLINE | ':' +// _tmp_213: NEWLINE | ':' static void * -_tmp_208_rule(Parser *p) +_tmp_213_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36692,18 +37244,18 @@ _tmp_208_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_208[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); + D(fprintf(stderr, "%*c> _tmp_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); Token * newline_var; if ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - D(fprintf(stderr, "%*c+ _tmp_208[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE")); + D(fprintf(stderr, "%*c+ _tmp_213[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE")); _res = newline_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_208[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_213[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NEWLINE")); } { // ':' @@ -36711,18 +37263,18 @@ _tmp_208_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_208[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_208[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_213[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_208[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_213[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } _res = NULL; @@ -36731,9 +37283,9 @@ _tmp_208_rule(Parser *p) return _res; } -// _tmp_209: 'as' NAME +// _tmp_214: 'as' NAME static void * -_tmp_209_rule(Parser *p) +_tmp_214_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36750,21 +37302,21 @@ _tmp_209_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_209[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_214[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_209[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_214[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_209[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_214[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -36773,9 +37325,9 @@ _tmp_209_rule(Parser *p) return _res; } -// _tmp_210: 'as' NAME +// _tmp_215: 'as' NAME static void * -_tmp_210_rule(Parser *p) +_tmp_215_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36792,21 +37344,21 @@ _tmp_210_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_210[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_215[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_210[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_215[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_210[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_215[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -36815,9 +37367,9 @@ _tmp_210_rule(Parser *p) return _res; } -// _tmp_211: positional_patterns ',' +// _tmp_216: positional_patterns ',' static void * -_tmp_211_rule(Parser *p) +_tmp_216_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36834,7 +37386,7 @@ _tmp_211_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_211[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "positional_patterns ','")); + D(fprintf(stderr, "%*c> _tmp_216[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "positional_patterns ','")); Token * _literal; asdl_pattern_seq* positional_patterns_var; if ( @@ -36843,12 +37395,12 @@ _tmp_211_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_211[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "positional_patterns ','")); + D(fprintf(stderr, "%*c+ _tmp_216[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "positional_patterns ','")); _res = _PyPegen_dummy_name(p, positional_patterns_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_211[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_216[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "positional_patterns ','")); } _res = NULL; @@ -36857,9 +37409,9 @@ _tmp_211_rule(Parser *p) return _res; } -// _tmp_212: '->' expression +// _tmp_217: '->' expression static void * -_tmp_212_rule(Parser *p) +_tmp_217_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36876,7 +37428,7 @@ _tmp_212_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_212[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); + D(fprintf(stderr, "%*c> _tmp_217[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); Token * _literal; expr_ty expression_var; if ( @@ -36885,12 +37437,12 @@ _tmp_212_rule(Parser *p) (expression_var = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ _tmp_212[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); + D(fprintf(stderr, "%*c+ _tmp_217[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); _res = _PyPegen_dummy_name(p, _literal, expression_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_212[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_217[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'->' expression")); } _res = NULL; @@ -36899,9 +37451,9 @@ _tmp_212_rule(Parser *p) return _res; } -// _tmp_213: '(' arguments? ')' +// _tmp_218: '(' arguments? ')' static void * -_tmp_213_rule(Parser *p) +_tmp_218_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36918,7 +37470,7 @@ _tmp_213_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + D(fprintf(stderr, "%*c> _tmp_218[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); Token * _literal; Token * _literal_1; void *_opt_var; @@ -36931,12 +37483,12 @@ _tmp_213_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_213[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + D(fprintf(stderr, "%*c+ _tmp_218[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? 
')'")); _res = _PyPegen_dummy_name(p, _literal, _opt_var, _literal_1); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_213[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_218[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' arguments? ')'")); } _res = NULL; @@ -36945,9 +37497,9 @@ _tmp_213_rule(Parser *p) return _res; } -// _tmp_214: '(' arguments? ')' +// _tmp_219: '(' arguments? ')' static void * -_tmp_214_rule(Parser *p) +_tmp_219_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36964,7 +37516,7 @@ _tmp_214_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_214[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + D(fprintf(stderr, "%*c> _tmp_219[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); Token * _literal; Token * _literal_1; void *_opt_var; @@ -36977,12 +37529,12 @@ _tmp_214_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_214[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + D(fprintf(stderr, "%*c+ _tmp_219[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); _res = _PyPegen_dummy_name(p, _literal, _opt_var, _literal_1); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_214[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_219[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' arguments? ')'")); } _res = NULL; @@ -36991,9 +37543,9 @@ _tmp_214_rule(Parser *p) return _res; } -// _loop0_216: ',' double_starred_kvpair +// _loop0_221: ',' double_starred_kvpair static asdl_seq * -_loop0_216_rule(Parser *p) +_loop0_221_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37020,7 +37572,7 @@ _loop0_216_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_216[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair")); + D(fprintf(stderr, "%*c> _loop0_221[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair")); Token * _literal; KeyValuePair* elem; while ( @@ -37051,7 +37603,7 @@ _loop0_216_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_216[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_221[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' double_starred_kvpair")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -37064,14 +37616,14 @@ _loop0_216_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_216_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_221_type, _seq); p->level--; return _seq; } -// _gather_215: double_starred_kvpair _loop0_216 +// _gather_220: double_starred_kvpair _loop0_221 static asdl_seq * -_gather_215_rule(Parser *p) +_gather_220_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37083,27 +37635,27 @@ _gather_215_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // double_starred_kvpair _loop0_216 + { // double_starred_kvpair _loop0_221 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_215[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_216")); + D(fprintf(stderr, "%*c> _gather_220[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_221")); KeyValuePair* elem; asdl_seq * seq; if ( (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair && - (seq = _loop0_216_rule(p)) // _loop0_216 + (seq = _loop0_221_rule(p)) // _loop0_221 ) { - D(fprintf(stderr, "%*c+ _gather_215[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_216")); + D(fprintf(stderr, "%*c+ _gather_220[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_221")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_215[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_216")); + D(fprintf(stderr, "%*c%s _gather_220[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_221")); } _res = NULL; done: @@ -37111,9 +37663,9 @@ _gather_215_rule(Parser *p) return _res; } -// _tmp_217: '}' | ',' +// _tmp_222: '}' | ',' static void * -_tmp_217_rule(Parser *p) +_tmp_222_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37130,18 +37682,18 @@ _tmp_217_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_217[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c> _tmp_222[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 26)) // token='}' ) { - D(fprintf(stderr, "%*c+ _tmp_217[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c+ _tmp_222[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_217[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_222[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'}'")); } { // ',' @@ -37149,18 +37701,18 @@ _tmp_217_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_217[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_222[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_217[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_222[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_217[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_222[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } _res = NULL; @@ -37169,9 +37721,9 @@ _tmp_217_rule(Parser *p) return _res; } -// _tmp_218: '}' | ',' +// _tmp_223: '}' | ',' static void * -_tmp_218_rule(Parser *p) +_tmp_223_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37188,18 +37740,18 @@ _tmp_218_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_218[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c> _tmp_223[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 26)) // token='}' ) { - D(fprintf(stderr, "%*c+ _tmp_218[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c+ _tmp_223[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_218[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_223[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'")); } { // ',' @@ -37207,18 +37759,18 @@ _tmp_218_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_218[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_223[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_218[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_223[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_218[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_223[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','")); } _res = NULL; @@ -37227,9 +37779,9 @@ _tmp_218_rule(Parser *p) return _res; } -// _tmp_219: star_targets '=' +// _tmp_224: star_targets '=' static void * -_tmp_219_rule(Parser *p) +_tmp_224_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37246,7 +37798,7 @@ _tmp_219_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_219[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_224[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty z; if ( @@ -37255,7 +37807,7 @@ _tmp_219_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_219[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_224[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37265,7 +37817,7 @@ _tmp_219_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_219[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_224[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -37274,9 +37826,9 @@ _tmp_219_rule(Parser *p) return _res; } -// _tmp_220: '.' | '...' +// _tmp_225: '.' | '...' static void * -_tmp_220_rule(Parser *p) +_tmp_225_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37293,18 +37845,18 @@ _tmp_220_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_220[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c> _tmp_225[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - D(fprintf(stderr, "%*c+ _tmp_220[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c+ _tmp_225[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_220[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_225[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); } { // '...' @@ -37312,18 +37864,18 @@ _tmp_220_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_220[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c> _tmp_225[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { - D(fprintf(stderr, "%*c+ _tmp_220[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c+ _tmp_225[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_220[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_225[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'")); } _res = NULL; @@ -37332,9 +37884,9 @@ _tmp_220_rule(Parser *p) return _res; } -// _tmp_221: '.' | '...' +// _tmp_226: '.' | '...' 
static void * -_tmp_221_rule(Parser *p) +_tmp_226_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37351,18 +37903,18 @@ _tmp_221_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_221[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c> _tmp_226[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - D(fprintf(stderr, "%*c+ _tmp_221[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c+ _tmp_226[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_221[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_226[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); } { // '...' @@ -37370,18 +37922,18 @@ _tmp_221_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_221[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c> _tmp_226[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { - D(fprintf(stderr, "%*c+ _tmp_221[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c+ _tmp_226[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_221[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_226[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'")); } _res = NULL; @@ -37390,9 +37942,9 @@ _tmp_221_rule(Parser *p) return _res; } -// _tmp_222: '@' named_expression NEWLINE +// _tmp_227: '@' named_expression NEWLINE static void * -_tmp_222_rule(Parser *p) +_tmp_227_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37409,7 +37961,7 @@ _tmp_222_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_222[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); + D(fprintf(stderr, "%*c> _tmp_227[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); Token * _literal; expr_ty f; Token * newline_var; @@ -37421,7 +37973,7 @@ _tmp_222_rule(Parser *p) (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - D(fprintf(stderr, "%*c+ _tmp_222[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); + D(fprintf(stderr, "%*c+ _tmp_227[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); _res = f; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37431,7 +37983,7 @@ _tmp_222_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_222[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_227[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'@' named_expression NEWLINE")); } _res = NULL; @@ -37440,9 +37992,9 @@ _tmp_222_rule(Parser *p) return _res; } -// _tmp_223: ',' expression +// _tmp_228: ',' expression static void * -_tmp_223_rule(Parser *p) +_tmp_228_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37459,7 +38011,7 @@ _tmp_223_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_223[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c> _tmp_228[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty c; if ( @@ -37468,7 +38020,7 @@ _tmp_223_rule(Parser *p) (c = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ _tmp_223[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c+ _tmp_228[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37478,7 +38030,7 @@ _tmp_223_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_223[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_228[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression")); } _res = NULL; @@ -37487,9 +38039,9 @@ _tmp_223_rule(Parser *p) return _res; } -// _tmp_224: ',' star_expression +// _tmp_229: ',' star_expression static void * -_tmp_224_rule(Parser *p) +_tmp_229_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37506,7 +38058,7 @@ _tmp_224_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_224[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + D(fprintf(stderr, "%*c> _tmp_229[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); Token * _literal; expr_ty c; if ( @@ -37515,7 +38067,7 @@ _tmp_224_rule(Parser *p) (c = star_expression_rule(p)) // star_expression ) { - D(fprintf(stderr, "%*c+ _tmp_224[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + D(fprintf(stderr, "%*c+ _tmp_229[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37525,7 +38077,7 @@ _tmp_224_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_224[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_229[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_expression")); } _res = NULL; @@ -37534,9 +38086,9 @@ _tmp_224_rule(Parser *p) return _res; } -// _tmp_225: 'or' conjunction +// _tmp_230: 'or' conjunction static void * -_tmp_225_rule(Parser *p) +_tmp_230_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37553,7 +38105,7 @@ _tmp_225_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_225[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); + D(fprintf(stderr, "%*c> _tmp_230[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); Token * _keyword; expr_ty c; if ( @@ -37562,7 +38114,7 @@ _tmp_225_rule(Parser *p) (c = conjunction_rule(p)) // conjunction ) { - D(fprintf(stderr, "%*c+ _tmp_225[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); + D(fprintf(stderr, "%*c+ _tmp_230[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37572,7 +38124,7 @@ _tmp_225_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_225[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_230[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'or' conjunction")); } _res = NULL; @@ -37581,9 +38133,9 @@ _tmp_225_rule(Parser *p) return _res; } -// _tmp_226: 'and' inversion +// _tmp_231: 'and' inversion static void * -_tmp_226_rule(Parser *p) +_tmp_231_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37600,7 +38152,7 @@ _tmp_226_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_226[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion")); + D(fprintf(stderr, "%*c> _tmp_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion")); Token * _keyword; expr_ty c; if ( @@ -37609,7 +38161,7 @@ _tmp_226_rule(Parser *p) (c = inversion_rule(p)) // inversion ) { - D(fprintf(stderr, "%*c+ _tmp_226[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion")); + D(fprintf(stderr, "%*c+ _tmp_231[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37619,7 +38171,7 @@ _tmp_226_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_226[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_231[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'and' inversion")); } _res = NULL; @@ -37628,9 +38180,9 @@ _tmp_226_rule(Parser *p) return _res; } -// _tmp_227: slice | starred_expression +// _tmp_232: slice | starred_expression static void * -_tmp_227_rule(Parser *p) +_tmp_232_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37647,18 +38199,18 @@ _tmp_227_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_227[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice")); + D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice")); expr_ty slice_var; if ( (slice_var = slice_rule(p)) // slice ) { - D(fprintf(stderr, "%*c+ _tmp_227[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice")); + D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice")); _res = slice_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_227[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slice")); } { // starred_expression @@ -37666,18 +38218,18 @@ _tmp_227_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_227[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); expr_ty starred_expression_var; if ( (starred_expression_var = starred_expression_rule(p)) // starred_expression ) { - D(fprintf(stderr, "%*c+ _tmp_227[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); _res = starred_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_227[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "starred_expression")); } _res = NULL; @@ -37686,9 +38238,9 @@ _tmp_227_rule(Parser *p) return _res; } -// _tmp_228: 'if' disjunction +// _tmp_233: 'if' disjunction static void * -_tmp_228_rule(Parser *p) +_tmp_233_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37705,16 +38257,16 @@ _tmp_228_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_228[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c> _tmp_233[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 634)) // token='if' + (_keyword = _PyPegen_expect_token(p, 641)) // token='if' && (z = disjunction_rule(p)) // disjunction ) { - D(fprintf(stderr, "%*c+ _tmp_228[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c+ _tmp_233[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37724,7 +38276,7 @@ _tmp_228_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_228[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_233[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'if' disjunction")); } _res = NULL; @@ -37733,9 +38285,9 @@ _tmp_228_rule(Parser *p) return _res; } -// _tmp_229: 'if' disjunction +// _tmp_234: 'if' disjunction static void * -_tmp_229_rule(Parser *p) +_tmp_234_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37752,16 +38304,16 @@ _tmp_229_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_229[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c> _tmp_234[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 634)) // token='if' + (_keyword = _PyPegen_expect_token(p, 641)) // token='if' && (z = disjunction_rule(p)) // disjunction ) { - D(fprintf(stderr, "%*c+ _tmp_229[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c+ _tmp_234[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37771,7 +38323,7 @@ _tmp_229_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_229[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_234[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'if' disjunction")); } _res = NULL; @@ -37780,9 +38332,9 @@ _tmp_229_rule(Parser *p) return _res; } -// _tmp_230: starred_expression | (assignment_expression | expression !':=') !'=' +// _tmp_235: starred_expression | (assignment_expression | expression !':=') !'=' static void * -_tmp_230_rule(Parser *p) +_tmp_235_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37799,18 +38351,18 @@ _tmp_230_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_230[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); expr_ty starred_expression_var; if ( (starred_expression_var = starred_expression_rule(p)) // starred_expression ) { - D(fprintf(stderr, "%*c+ _tmp_230[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); _res = starred_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_230[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "starred_expression")); } { // (assignment_expression | expression !':=') !'=' @@ -37818,20 +38370,20 @@ _tmp_230_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_230[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); - void *_tmp_243_var; + D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); + void *_tmp_247_var; if ( - (_tmp_243_var = _tmp_243_rule(p)) // assignment_expression | expression !':=' + (_tmp_247_var = _tmp_247_rule(p)) // assignment_expression | expression !':=' && _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 22) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_230[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); - _res = _tmp_243_var; + D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); + _res = _tmp_247_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_230[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(assignment_expression | expression !':=') !'='")); } _res = NULL; @@ -37840,9 +38392,9 @@ _tmp_230_rule(Parser *p) return _res; } -// _tmp_231: ',' star_target +// _tmp_236: ',' star_target static void * -_tmp_231_rule(Parser *p) +_tmp_236_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37859,7 +38411,7 @@ _tmp_231_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c> _tmp_236[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); Token * _literal; expr_ty c; if ( @@ -37868,7 +38420,7 @@ _tmp_231_rule(Parser *p) (c = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_231[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c+ _tmp_236[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37878,7 +38430,7 @@ _tmp_231_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_231[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_236[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_target")); } _res = NULL; @@ -37887,9 +38439,9 @@ _tmp_231_rule(Parser *p) return _res; } -// _tmp_232: ',' star_target +// _tmp_237: ',' star_target static void * -_tmp_232_rule(Parser *p) +_tmp_237_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37906,7 +38458,7 @@ _tmp_232_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c> _tmp_237[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); Token * _literal; expr_ty c; if ( @@ -37915,7 +38467,7 @@ _tmp_232_rule(Parser *p) (c = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c+ _tmp_237[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37925,7 +38477,7 @@ _tmp_232_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_237[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_target")); } _res = NULL; @@ -37934,9 +38486,9 @@ _tmp_232_rule(Parser *p) return _res; } -// _tmp_233: star_targets '=' +// _tmp_238: star_targets '=' static void * -_tmp_233_rule(Parser *p) +_tmp_238_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37953,7 +38505,7 @@ _tmp_233_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_233[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_238[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty star_targets_var; if ( @@ -37962,12 +38514,12 @@ _tmp_233_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_233[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_238[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = _PyPegen_dummy_name(p, star_targets_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_233[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_238[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -37976,9 +38528,9 @@ _tmp_233_rule(Parser *p) return _res; } -// _tmp_234: star_targets '=' +// _tmp_239: star_targets '=' static void * -_tmp_234_rule(Parser *p) +_tmp_239_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37995,7 +38547,7 @@ _tmp_234_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_234[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_239[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty star_targets_var; if ( @@ -38004,12 +38556,12 @@ _tmp_234_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_234[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_239[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = _PyPegen_dummy_name(p, star_targets_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_234[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_239[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -38018,9 +38570,9 @@ _tmp_234_rule(Parser *p) return _res; } -// _tmp_235: ')' | '**' +// _tmp_240: ')' | '**' static void * -_tmp_235_rule(Parser *p) +_tmp_240_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38037,18 +38589,18 @@ _tmp_235_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_240[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_240[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_240[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // '**' @@ -38056,18 +38608,18 @@ _tmp_235_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_240[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_240[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_240[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'**'")); } _res = NULL; @@ -38076,9 +38628,9 @@ _tmp_235_rule(Parser *p) return _res; } -// _tmp_236: ':' | '**' +// _tmp_241: ':' | '**' static void * -_tmp_236_rule(Parser *p) +_tmp_241_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38095,18 +38647,18 @@ _tmp_236_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_236[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_241[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_236[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_241[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_236[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_241[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } { // '**' @@ -38114,18 +38666,18 @@ _tmp_236_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_236[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_241[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_236[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_241[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_236[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_241[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'")); } _res = NULL; @@ -38134,9 +38686,9 @@ _tmp_236_rule(Parser *p) return _res; } -// _tmp_237: expression ['as' star_target] +// _tmp_242: expression ['as' star_target] static void * -_tmp_237_rule(Parser *p) +_tmp_242_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38153,22 +38705,22 @@ _tmp_237_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_237[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); + D(fprintf(stderr, "%*c> _tmp_242[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_244_rule(p), !p->error_indicator) // ['as' star_target] + (_opt_var = _tmp_248_rule(p), !p->error_indicator) // ['as' star_target] ) { - D(fprintf(stderr, "%*c+ _tmp_237[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); + D(fprintf(stderr, "%*c+ _tmp_242[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); _res = _PyPegen_dummy_name(p, expression_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_237[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_242[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ['as' star_target]")); } _res = NULL; @@ -38177,9 +38729,9 @@ _tmp_237_rule(Parser *p) return _res; } -// _tmp_238: expressions ['as' star_target] +// _tmp_243: expressions ['as' star_target] static void * -_tmp_238_rule(Parser *p) +_tmp_243_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38196,22 +38748,22 @@ _tmp_238_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_238[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); + D(fprintf(stderr, "%*c> _tmp_243[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expressions_var; if ( (expressions_var = expressions_rule(p)) // expressions && - (_opt_var = _tmp_245_rule(p), !p->error_indicator) // ['as' star_target] + (_opt_var = _tmp_249_rule(p), !p->error_indicator) // ['as' star_target] ) { - D(fprintf(stderr, "%*c+ _tmp_238[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); + D(fprintf(stderr, "%*c+ _tmp_243[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); _res = _PyPegen_dummy_name(p, expressions_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_238[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_243[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expressions ['as' star_target]")); } _res = NULL; @@ -38220,9 +38772,9 @@ _tmp_238_rule(Parser *p) return _res; } -// _tmp_239: expression ['as' star_target] +// _tmp_244: expression ['as' star_target] static void * -_tmp_239_rule(Parser *p) +_tmp_244_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38239,22 +38791,22 @@ _tmp_239_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_239[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); + D(fprintf(stderr, "%*c> _tmp_244[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_246_rule(p), !p->error_indicator) // ['as' star_target] + (_opt_var = _tmp_250_rule(p), !p->error_indicator) // ['as' star_target] ) { - D(fprintf(stderr, "%*c+ _tmp_239[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); + D(fprintf(stderr, "%*c+ _tmp_244[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); _res = _PyPegen_dummy_name(p, expression_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_239[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_244[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ['as' star_target]")); } _res = NULL; @@ -38263,9 +38815,9 @@ _tmp_239_rule(Parser *p) return _res; } -// _tmp_240: expressions ['as' star_target] +// _tmp_245: expressions ['as' star_target] static void * -_tmp_240_rule(Parser *p) +_tmp_245_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38282,22 +38834,22 @@ _tmp_240_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_240[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); + D(fprintf(stderr, "%*c> _tmp_245[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expressions_var; if ( (expressions_var = expressions_rule(p)) // expressions && - (_opt_var = _tmp_247_rule(p), !p->error_indicator) // ['as' star_target] + (_opt_var = _tmp_251_rule(p), !p->error_indicator) // ['as' star_target] ) { - D(fprintf(stderr, "%*c+ _tmp_240[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); + D(fprintf(stderr, "%*c+ _tmp_245[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); _res = _PyPegen_dummy_name(p, expressions_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_240[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_245[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expressions ['as' star_target]")); } _res = NULL; @@ -38306,51 +38858,9 @@ _tmp_240_rule(Parser *p) return _res; } -// _tmp_241: except_block+ except_star_block -static void * -_tmp_241_rule(Parser *p) -{ - if (p->level++ == MAXSTACK) { - p->error_indicator = 1; - PyErr_NoMemory(); - } - if (p->error_indicator) { - p->level--; - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // except_block+ except_star_block - if (p->error_indicator) { - p->level--; - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_241[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block+ except_star_block")); - asdl_seq * _loop1_248_var; - excepthandler_ty except_star_block_var; - if ( - (_loop1_248_var = _loop1_248_rule(p)) // except_block+ - && - (except_star_block_var = except_star_block_rule(p)) // except_star_block - ) - { - D(fprintf(stderr, "%*c+ _tmp_241[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "except_block+ except_star_block")); - _res = _PyPegen_dummy_name(p, _loop1_248_var, except_star_block_var); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_241[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "except_block+ except_star_block")); - } - _res = NULL; - done: - p->level--; - return _res; -} - -// _tmp_242: except_star_block+ except_block +// _tmp_246: 'as' NAME static void * -_tmp_242_rule(Parser *p) +_tmp_246_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38362,27 +38872,27 @@ _tmp_242_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; - { // except_star_block+ except_block + { // 'as' NAME if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_242[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block+ except_block")); - asdl_seq * _loop1_249_var; - excepthandler_ty except_block_var; + D(fprintf(stderr, "%*c> _tmp_246[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + Token * _keyword; + expr_ty name_var; if ( - (_loop1_249_var = _loop1_249_rule(p)) // except_star_block+ + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && - (except_block_var = except_block_rule(p)) // except_block + (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_242[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "except_star_block+ except_block")); - _res = _PyPegen_dummy_name(p, _loop1_249_var, except_block_var); + D(fprintf(stderr, "%*c+ _tmp_246[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_242[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "except_star_block+ except_block")); + D(fprintf(stderr, "%*c%s _tmp_246[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; done: @@ -38390,9 +38900,9 @@ _tmp_242_rule(Parser *p) return _res; } -// _tmp_243: assignment_expression | expression !':=' +// _tmp_247: assignment_expression | expression !':=' static void * -_tmp_243_rule(Parser *p) +_tmp_247_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38409,18 +38919,18 @@ _tmp_243_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_243[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression")); + D(fprintf(stderr, "%*c> _tmp_247[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression")); expr_ty assignment_expression_var; if ( (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression ) { - D(fprintf(stderr, "%*c+ _tmp_243[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression")); + D(fprintf(stderr, "%*c+ _tmp_247[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression")); _res = assignment_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_243[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_247[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "assignment_expression")); } { // expression !':=' @@ -38428,7 +38938,7 @@ _tmp_243_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_243[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='")); + D(fprintf(stderr, "%*c> _tmp_247[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='")); expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression @@ -38436,12 +38946,12 @@ _tmp_243_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':=' ) { - D(fprintf(stderr, "%*c+ _tmp_243[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='")); + D(fprintf(stderr, "%*c+ _tmp_247[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='")); _res = expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_243[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_247[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression !':='")); } _res = NULL; @@ -38450,9 +38960,9 @@ _tmp_243_rule(Parser *p) return _res; } -// _tmp_244: 'as' star_target +// _tmp_248: 'as' star_target static void * -_tmp_244_rule(Parser *p) +_tmp_248_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38469,21 +38979,21 @@ _tmp_244_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_244[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c> _tmp_248[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_244[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c+ _tmp_248[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); _res = _PyPegen_dummy_name(p, _keyword, star_target_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_244[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_248[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' star_target")); } _res = NULL; @@ -38492,9 +39002,9 @@ _tmp_244_rule(Parser *p) return _res; } -// _tmp_245: 'as' star_target +// _tmp_249: 'as' star_target static void * -_tmp_245_rule(Parser *p) +_tmp_249_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38511,21 +39021,21 @@ _tmp_245_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_245[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c> _tmp_249[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_245[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c+ _tmp_249[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); _res = _PyPegen_dummy_name(p, _keyword, star_target_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_245[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_249[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' star_target")); } _res = NULL; @@ -38534,9 +39044,9 @@ _tmp_245_rule(Parser *p) return _res; } -// _tmp_246: 'as' star_target +// _tmp_250: 'as' star_target static void * -_tmp_246_rule(Parser *p) +_tmp_250_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38553,21 +39063,21 @@ _tmp_246_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_246[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c> _tmp_250[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_246[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c+ _tmp_250[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); _res = _PyPegen_dummy_name(p, _keyword, star_target_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_246[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_250[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' star_target")); } _res = NULL; @@ -38576,9 +39086,9 @@ _tmp_246_rule(Parser *p) return _res; } -// _tmp_247: 'as' star_target +// _tmp_251: 'as' star_target static void * -_tmp_247_rule(Parser *p) +_tmp_251_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38595,21 +39105,21 @@ _tmp_247_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_247[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c> _tmp_251[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_247[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c+ _tmp_251[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); _res = _PyPegen_dummy_name(p, _keyword, star_target_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_247[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_251[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' star_target")); } _res = NULL; @@ -38618,154 +39128,6 @@ _tmp_247_rule(Parser *p) return _res; } -// _loop1_248: except_block -static asdl_seq * -_loop1_248_rule(Parser *p) -{ - if (p->level++ == MAXSTACK) { - p->error_indicator = 1; - PyErr_NoMemory(); - } - if (p->error_indicator) { - p->level--; - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - p->level--; - return NULL; - } - Py_ssize_t _children_capacity = 1; - Py_ssize_t _n = 0; - { // except_block - if (p->error_indicator) { - p->level--; - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_248[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block")); - excepthandler_ty except_block_var; - while ( - (except_block_var = except_block_rule(p)) // except_block - ) - { - _res = except_block_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - p->level--; - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_248[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "except_block")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - p->level--; - return NULL; - } - asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - p->level--; - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_248_type, _seq); - p->level--; - return _seq; -} - -// _loop1_249: except_star_block -static asdl_seq * -_loop1_249_rule(Parser *p) -{ - if (p->level++ == MAXSTACK) { - p->error_indicator = 1; - PyErr_NoMemory(); - } - if (p->error_indicator) { - p->level--; - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - p->level--; - return NULL; - } - Py_ssize_t _children_capacity = 1; - Py_ssize_t _n = 0; - { // except_star_block - if (p->error_indicator) { - p->level--; - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_249[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block")); - excepthandler_ty except_star_block_var; - while ( - (except_star_block_var = except_star_block_rule(p)) // except_star_block - ) - { - _res = except_star_block_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - p->level--; - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_249[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "except_star_block")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - p->level--; - return NULL; - } - asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - p->level--; - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_249_type, _seq); - p->level--; - return _seq; -} - void * _PyPegen_parse(Parser *p) { diff --git a/Parser/pegen.c b/Parser/pegen.c index 1317606749b8c0..d84e06861edefc 100644 --- a/Parser/pegen.c +++ b/Parser/pegen.c @@ -246,8 +246,8 @@ _PyPegen_fill_token(Parser *p) // The array counts the number of tokens skipped by memoization, // indexed by type. 
-#define NSTATISTICS 2000 -static long memo_statistics[NSTATISTICS]; +#define NSTATISTICS _PYPEGEN_NSTATISTICS +#define memo_statistics _PyRuntime.parser.memo_statistics void _PyPegen_clear_memo_statistics() @@ -885,8 +885,7 @@ _PyPegen_run_parser_from_file_pointer(FILE *fp, int start_rule, PyObject *filena tok->fp_interactive = 1; } // This transfers the ownership to the tokenizer - tok->filename = filename_ob; - Py_INCREF(filename_ob); + tok->filename = Py_NewRef(filename_ob); // From here on we need to clean up even if there's an error mod_ty result = NULL; @@ -925,8 +924,7 @@ _PyPegen_run_parser_from_string(const char *str, int start_rule, PyObject *filen return NULL; } // This transfers the ownership to the tokenizer - tok->filename = filename_ob; - Py_INCREF(filename_ob); + tok->filename = Py_NewRef(filename_ob); // We need to clear up from here on mod_ty result = NULL; diff --git a/Parser/pegen_errors.c b/Parser/pegen_errors.c index 7738cbaf9ef39e..6ea7600119b643 100644 --- a/Parser/pegen_errors.c +++ b/Parser/pegen_errors.c @@ -169,6 +169,10 @@ _PyPegen_tokenize_full_source_to_check_for_errors(Parser *p) { for (;;) { switch (_PyTokenizer_Get(p->tok, &new_token)) { case ERRORTOKEN: + if (PyErr_Occurred()) { + ret = -1; + goto exit; + } if (p->tok->level != 0) { int error_lineno = p->tok->parenlinenostack[p->tok->level-1]; if (current_err_line > error_lineno) { diff --git a/Parser/string_parser.c b/Parser/string_parser.c index 9bc3b082136be5..c096bea7426e5c 100644 --- a/Parser/string_parser.c +++ b/Parser/string_parser.c @@ -21,9 +21,16 @@ warn_invalid_escape_sequence(Parser *p, const char *first_invalid_escape, Token if (msg == NULL) { return -1; } - if (PyErr_WarnExplicitObject(PyExc_DeprecationWarning, msg, p->tok->filename, + PyObject *category; + if (p->feature_version >= 12) { + category = PyExc_SyntaxWarning; + } + else { + category = PyExc_DeprecationWarning; + } + if (PyErr_WarnExplicitObject(category, msg, p->tok->filename, t->lineno, NULL, NULL) < 0) { - if (PyErr_ExceptionMatches(PyExc_DeprecationWarning)) { + if (PyErr_ExceptionMatches(category)) { /* Replace the DeprecationWarning exception with a SyntaxError to get a more accurate error report */ PyErr_Clear(); @@ -410,9 +417,7 @@ fstring_compile_expr(Parser *p, const char *expr_start, const char *expr_end, PyMem_Free(str); return NULL; } - Py_INCREF(p->tok->filename); - - tok->filename = p->tok->filename; + tok->filename = Py_NewRef(p->tok->filename); tok->lineno = t->lineno + lines - 1; Parser *p2 = _PyPegen_Parser_New(tok, Py_fstring_input, p->flags, p->feature_version, diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c index 1c356d3d47c945..463c0e00ca1411 100644 --- a/Parser/tokenizer.c +++ b/Parser/tokenizer.c @@ -97,6 +97,7 @@ tok_new(void) tok->async_def_nl = 0; tok->interactive_underflow = IUNDERFLOW_NORMAL; tok->str = NULL; + tok->report_warnings = 1; #ifdef Py_DEBUG tok->debug = _Py_GetConfig()->parser_debug; #endif @@ -413,7 +414,11 @@ tok_readline_recode(struct tok_state *tok) { error_ret(tok); goto error; } - if (!tok_reserve_buf(tok, buflen + 1)) { + // Make room for the null terminator *and* potentially + // an extra newline character that we may need to artificially + // add. 
+ size_t buffer_size = buflen + 2; + if (!tok_reserve_buf(tok, buffer_size)) { goto error; } memcpy(tok->inp, buf, buflen); @@ -1000,6 +1005,7 @@ tok_underflow_file(struct tok_state *tok) { return 0; } if (tok->inp[-1] != '\n') { + assert(tok->inp + 1 < tok->end); /* Last line does not end in \n, fake one */ *tok->inp++ = '\n'; *tok->inp = '\0'; @@ -1196,6 +1202,10 @@ indenterror(struct tok_state *tok) static int parser_warn(struct tok_state *tok, PyObject *category, const char *format, ...) { + if (!tok->report_warnings) { + return 0; + } + PyObject *errmsg; va_list vargs; va_start(vargs, format); @@ -2223,8 +2233,7 @@ _PyTokenizer_FindEncodingFilename(int fd, PyObject *filename) return NULL; } if (filename != NULL) { - Py_INCREF(filename); - tok->filename = filename; + tok->filename = Py_NewRef(filename); } else { tok->filename = PyUnicode_FromString("<string>"); @@ -2235,6 +2244,9 @@ _PyTokenizer_FindEncodingFilename(int fd, PyObject *filename) } } struct token token; + // We don't want to report warnings here because it could cause infinite recursion + // if fetching the encoding shows a warning. + tok->report_warnings = 0; while (tok->lineno < 2 && tok->done == E_OK) { _PyTokenizer_Get(tok, &token); } diff --git a/Parser/tokenizer.h b/Parser/tokenizer.h index 2542d30e1da0ed..16a94d5f51d664 100644 --- a/Parser/tokenizer.h +++ b/Parser/tokenizer.h @@ -92,6 +92,7 @@ struct tok_state { NEWLINE token after it. */ /* How to proceed when asked for a new token in interactive mode */ enum interactive_underflow_t interactive_underflow; + int report_warnings; #ifdef Py_DEBUG int debug; #endif diff --git a/Programs/_bootstrap_python.c b/Programs/_bootstrap_python.c index bbac0c4e1a8a45..6c388fc7033dd0 100644 --- a/Programs/_bootstrap_python.c +++ b/Programs/_bootstrap_python.c @@ -12,8 +12,11 @@ /* Includes for frozen modules: */ #include "Python/frozen_modules/importlib._bootstrap.h" #include "Python/frozen_modules/importlib._bootstrap_external.h" +#include "Python/frozen_modules/zipimport.h" /* End includes */ +uint32_t _Py_next_func_version = 1; + /* Empty initializer for deepfrozen modules */ int _Py_Deepfreeze_Init(void) { @@ -30,6 +33,7 @@ _Py_Deepfreeze_Fini(void) static const struct _frozen bootstrap_modules[] = { {"_frozen_importlib", _Py_M__importlib__bootstrap, (int)sizeof(_Py_M__importlib__bootstrap)}, {"_frozen_importlib_external", _Py_M__importlib__bootstrap_external, (int)sizeof(_Py_M__importlib__bootstrap_external)}, + {"zipimport", _Py_M__zipimport, (int)sizeof(_Py_M__zipimport)}, {0, 0, 0} /* bootstrap sentinel */ }; static const struct _frozen stdlib_modules[] = { diff --git a/Programs/_freeze_module.c b/Programs/_freeze_module.c index d6d737d48d6070..90fc2dc6e87da8 100644 --- a/Programs/_freeze_module.c +++ b/Programs/_freeze_module.c @@ -9,6 +9,7 @@ Keep this file in sync with Programs/_freeze_module.py. 
*/ + #include <Python.h> #include <marshal.h> #include "pycore_fileutils.h" // _Py_stat_struct @@ -22,6 +23,8 @@ #include <unistd.h> #endif +uint32_t _Py_next_func_version = 1; + /* Empty initializer for deepfrozen modules */ int _Py_Deepfreeze_Init(void) { @@ -194,6 +197,7 @@ write_frozen(const char *outpath, const char *inpath, const char *name, if (ferror(outfile)) { fprintf(stderr, "error when writing to '%s'\n", outpath); + fclose(outfile); return -1; } fclose(outfile); diff --git a/Programs/_testembed.c b/Programs/_testembed.c index adb4483ccbd3c0..a6ce3f7b200550 100644 --- a/Programs/_testembed.c +++ b/Programs/_testembed.c @@ -1,7 +1,6 @@ #ifndef Py_BUILD_CORE_MODULE # define Py_BUILD_CORE_MODULE #endif -#define NEEDS_PY_IDENTIFIER /* Always enable assertion (even in release mode) */ #undef NDEBUG @@ -1891,7 +1890,14 @@ static int test_unicode_id_init(void) { // bpo-42882: Test that _PyUnicode_FromId() works // when Python is initialized multiples times. - _Py_IDENTIFIER(test_unicode_id_init); + + // This is equivalent to `_Py_IDENTIFIER(test_unicode_id_init)` + // but since `_Py_IDENTIFIER` is disabled when `Py_BUILD_CORE` + // is defined, it is manually expanded here. + static _Py_Identifier PyId_test_unicode_id_init = { + .string = "test_unicode_id_init", + .index = -1, + }; // Initialize Python once without using the identifier _testembed_Py_InitializeFromConfig(); diff --git a/Programs/test_frozenmain.h b/Programs/test_frozenmain.h index 96be3ce3c25c3f..f365c12adce7a8 100644 --- a/Programs/test_frozenmain.h +++ b/Programs/test_frozenmain.h @@ -1,42 +1,48 @@ // Auto-generated by Programs/freeze_test_frozenmain.py unsigned char M_test_frozenmain[] = { 227,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0, - 0,0,0,0,0,243,184,0,0,0,151,0,100,0,100,1, - 108,0,90,0,100,0,100,1,108,1,90,1,2,0,101,2, - 100,2,171,1,0,0,0,0,0,0,0,0,1,0,2,0, - 101,2,100,3,101,0,106,6,0,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,171,2,0,0,0,0, - 0,0,0,0,1,0,2,0,101,1,106,8,0,0,0,0, + 0,0,0,0,0,243,26,1,0,0,151,0,0,0,100,0, + 0,0,100,1,0,0,108,0,0,0,90,0,0,0,100,0, + 0,0,100,1,0,0,108,1,0,0,90,1,0,0,2,0, + 0,0,101,2,0,0,100,2,0,0,171,1,0,0,0,0, + 0,0,0,0,0,0,1,0,0,0,2,0,0,0,101,2, + 0,0,100,3,0,0,101,0,0,0,106,6,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 171,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0, + 2,0,0,0,101,1,0,0,106,8,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,171,0, - 0,0,0,0,0,0,0,0,100,4,25,0,0,0,0,0, - 0,0,0,0,90,5,100,5,68,0,93,23,0,0,90,6, - 2,0,101,2,100,6,101,6,155,0,100,7,101,5,101,6, - 25,0,0,0,0,0,0,0,0,0,155,0,157,4,171,1, - 0,0,0,0,0,0,0,0,1,0,140,25,4,0,100,1, - 83,0,41,8,233,0,0,0,0,78,122,18,70,114,111,122, - 101,110,32,72,101,108,108,111,32,87,111,114,108,100,122,8, - 115,121,115,46,97,114,103,118,218,6,99,111,110,102,105,103, - 41,5,218,12,112,114,111,103,114,97,109,95,110,97,109,101, - 218,10,101,120,101,99,117,116,97,98,108,101,218,15,117,115, - 101,95,101,110,118,105,114,111,110,109,101,110,116,218,17,99, - 111,110,102,105,103,117,114,101,95,99,95,115,116,100,105,111, - 218,14,98,117,102,102,101,114,101,100,95,115,116,100,105,111, - 122,7,99,111,110,102,105,103,32,122,2,58,32,41,7,218, - 3,115,121,115,218,17,95,116,101,115,116,105,110,116,101,114, - 110,97,108,99,97,112,105,218,5,112,114,105,110,116,218,4, - 97,114,103,118,218,11,103,101,116,95,99,111,110,102,105,103, - 115,114,3,0,0,0,218,3,107,101,121,169,0,243,0,0, - 0,0,250,18,116,101,115,116,95,102,114,111,122,101,110,109, - 97,105,110,46,112,121,250,8,60,109,111,100,117,108,101,62, - 114,18,0,0,0,1,0,0,0,115,154,0,0,0,240,3, - 
1,1,1,240,8,0,1,11,128,10,128,10,128,10,216,0, - 24,208,0,24,208,0,24,208,0,24,224,0,5,128,5,208, - 6,26,212,0,27,208,0,27,216,0,5,128,5,128,106,144, - 35,151,40,145,40,212,0,27,208,0,27,216,9,38,208,9, - 26,215,9,38,209,9,38,212,9,40,168,24,212,9,50,128, - 6,240,2,6,12,2,240,0,7,1,42,241,0,7,1,42, - 128,67,240,14,0,5,10,128,69,208,10,40,144,67,208,10, - 40,208,10,40,152,54,160,35,156,59,208,10,40,208,10,40, - 212,4,41,208,4,41,208,4,41,240,15,7,1,42,240,0, - 7,1,42,240,0,7,1,42,114,16,0,0,0, + 0,0,0,0,0,0,0,0,0,0,100,4,0,0,25,0, + 0,0,0,0,0,0,0,0,0,0,90,5,0,0,100,5, + 0,0,68,0,0,0,93,38,0,0,0,0,90,6,0,0, + 2,0,0,0,101,2,0,0,100,6,0,0,101,6,0,0, + 155,0,0,0,100,7,0,0,101,5,0,0,101,6,0,0, + 25,0,0,0,0,0,0,0,0,0,0,0,155,0,0,0, + 157,4,0,0,171,1,0,0,0,0,0,0,0,0,0,0, + 1,0,0,0,140,41,0,0,4,0,0,0,100,1,0,0, + 83,0,0,0,41,8,233,0,0,0,0,78,122,18,70,114, + 111,122,101,110,32,72,101,108,108,111,32,87,111,114,108,100, + 122,8,115,121,115,46,97,114,103,118,218,6,99,111,110,102, + 105,103,41,5,218,12,112,114,111,103,114,97,109,95,110,97, + 109,101,218,10,101,120,101,99,117,116,97,98,108,101,218,15, + 117,115,101,95,101,110,118,105,114,111,110,109,101,110,116,218, + 17,99,111,110,102,105,103,117,114,101,95,99,95,115,116,100, + 105,111,218,14,98,117,102,102,101,114,101,100,95,115,116,100, + 105,111,122,7,99,111,110,102,105,103,32,122,2,58,32,41, + 7,218,3,115,121,115,218,17,95,116,101,115,116,105,110,116, + 101,114,110,97,108,99,97,112,105,218,5,112,114,105,110,116, + 218,4,97,114,103,118,218,11,103,101,116,95,99,111,110,102, + 105,103,115,114,3,0,0,0,218,3,107,101,121,169,0,243, + 0,0,0,0,250,18,116,101,115,116,95,102,114,111,122,101, + 110,109,97,105,110,46,112,121,250,8,60,109,111,100,117,108, + 101,62,114,18,0,0,0,1,0,0,0,115,154,0,0,0, + 241,3,1,1,1,241,8,0,1,11,129,10,129,10,129,10, + 217,0,24,209,0,24,209,0,24,209,0,24,225,0,5,129, + 5,209,6,26,213,0,27,209,0,27,217,0,5,129,5,129, + 106,145,35,151,40,146,40,213,0,27,209,0,27,217,9,38, + 209,9,26,215,9,38,210,9,38,213,9,40,169,24,213,9, + 50,129,6,241,2,6,12,2,241,0,7,1,42,242,0,7, + 1,42,129,67,241,14,0,5,10,129,69,209,10,40,145,67, + 209,10,40,209,10,40,153,54,161,35,157,59,209,10,40,209, + 10,40,213,4,41,209,4,41,209,4,41,241,15,7,1,42, + 241,0,7,1,42,241,0,7,1,42,114,16,0,0,0, }; diff --git a/Python/Python-ast.c b/Python/Python-ast.c index 2571e28bc1690d..d113c47b95392e 100644 --- a/Python/Python-ast.c +++ b/Python/Python-ast.c @@ -263,6 +263,10 @@ void _PyAST_Fini(PyInterpreterState *interp) Py_CLEAR(state->vararg); Py_CLEAR(state->withitem_type); + if (_PyInterpreterState_Get() == _PyInterpreterState_Main()) { + Py_CLEAR(_Py_CACHED_OBJECT(str_replace_inf)); + } + #if !defined(NDEBUG) state->initialized = -1; #else @@ -993,10 +997,11 @@ static PyObject* ast2obj_list(struct ast_state *state, asdl_seq *seq, PyObject* static PyObject* ast2obj_object(struct ast_state *Py_UNUSED(state), void *o) { - if (!o) - o = Py_None; - Py_INCREF((PyObject*)o); - return (PyObject*)o; + PyObject *op = (PyObject*)o; + if (!op) { + op = Py_None; + } + return Py_NewRef(op); } #define ast2obj_constant ast2obj_object #define ast2obj_identifier ast2obj_object @@ -1018,9 +1023,11 @@ static int obj2ast_object(struct ast_state *Py_UNUSED(state), PyObject* obj, PyO *out = NULL; return -1; } - Py_INCREF(obj); + *out = Py_NewRef(obj); + } + else { + *out = NULL; } - *out = obj; return 0; } @@ -1030,8 +1037,7 @@ static int obj2ast_constant(struct ast_state *Py_UNUSED(state), PyObject* obj, P *out = NULL; return -1; } - Py_INCREF(obj); - *out = obj; + *out = Py_NewRef(obj); 
return 0; } @@ -4732,14 +4738,11 @@ PyObject* ast2obj_expr_context(struct ast_state *state, expr_context_ty o) { switch(o) { case Load: - Py_INCREF(state->Load_singleton); - return state->Load_singleton; + return Py_NewRef(state->Load_singleton); case Store: - Py_INCREF(state->Store_singleton); - return state->Store_singleton; + return Py_NewRef(state->Store_singleton); case Del: - Py_INCREF(state->Del_singleton); - return state->Del_singleton; + return Py_NewRef(state->Del_singleton); } Py_UNREACHABLE(); } @@ -4747,11 +4750,9 @@ PyObject* ast2obj_boolop(struct ast_state *state, boolop_ty o) { switch(o) { case And: - Py_INCREF(state->And_singleton); - return state->And_singleton; + return Py_NewRef(state->And_singleton); case Or: - Py_INCREF(state->Or_singleton); - return state->Or_singleton; + return Py_NewRef(state->Or_singleton); } Py_UNREACHABLE(); } @@ -4759,44 +4760,31 @@ PyObject* ast2obj_operator(struct ast_state *state, operator_ty o) { switch(o) { case Add: - Py_INCREF(state->Add_singleton); - return state->Add_singleton; + return Py_NewRef(state->Add_singleton); case Sub: - Py_INCREF(state->Sub_singleton); - return state->Sub_singleton; + return Py_NewRef(state->Sub_singleton); case Mult: - Py_INCREF(state->Mult_singleton); - return state->Mult_singleton; + return Py_NewRef(state->Mult_singleton); case MatMult: - Py_INCREF(state->MatMult_singleton); - return state->MatMult_singleton; + return Py_NewRef(state->MatMult_singleton); case Div: - Py_INCREF(state->Div_singleton); - return state->Div_singleton; + return Py_NewRef(state->Div_singleton); case Mod: - Py_INCREF(state->Mod_singleton); - return state->Mod_singleton; + return Py_NewRef(state->Mod_singleton); case Pow: - Py_INCREF(state->Pow_singleton); - return state->Pow_singleton; + return Py_NewRef(state->Pow_singleton); case LShift: - Py_INCREF(state->LShift_singleton); - return state->LShift_singleton; + return Py_NewRef(state->LShift_singleton); case RShift: - Py_INCREF(state->RShift_singleton); - return state->RShift_singleton; + return Py_NewRef(state->RShift_singleton); case BitOr: - Py_INCREF(state->BitOr_singleton); - return state->BitOr_singleton; + return Py_NewRef(state->BitOr_singleton); case BitXor: - Py_INCREF(state->BitXor_singleton); - return state->BitXor_singleton; + return Py_NewRef(state->BitXor_singleton); case BitAnd: - Py_INCREF(state->BitAnd_singleton); - return state->BitAnd_singleton; + return Py_NewRef(state->BitAnd_singleton); case FloorDiv: - Py_INCREF(state->FloorDiv_singleton); - return state->FloorDiv_singleton; + return Py_NewRef(state->FloorDiv_singleton); } Py_UNREACHABLE(); } @@ -4804,17 +4792,13 @@ PyObject* ast2obj_unaryop(struct ast_state *state, unaryop_ty o) { switch(o) { case Invert: - Py_INCREF(state->Invert_singleton); - return state->Invert_singleton; + return Py_NewRef(state->Invert_singleton); case Not: - Py_INCREF(state->Not_singleton); - return state->Not_singleton; + return Py_NewRef(state->Not_singleton); case UAdd: - Py_INCREF(state->UAdd_singleton); - return state->UAdd_singleton; + return Py_NewRef(state->UAdd_singleton); case USub: - Py_INCREF(state->USub_singleton); - return state->USub_singleton; + return Py_NewRef(state->USub_singleton); } Py_UNREACHABLE(); } @@ -4822,35 +4806,25 @@ PyObject* ast2obj_cmpop(struct ast_state *state, cmpop_ty o) { switch(o) { case Eq: - Py_INCREF(state->Eq_singleton); - return state->Eq_singleton; + return Py_NewRef(state->Eq_singleton); case NotEq: - Py_INCREF(state->NotEq_singleton); - return state->NotEq_singleton; + return 
Py_NewRef(state->NotEq_singleton); case Lt: - Py_INCREF(state->Lt_singleton); - return state->Lt_singleton; + return Py_NewRef(state->Lt_singleton); case LtE: - Py_INCREF(state->LtE_singleton); - return state->LtE_singleton; + return Py_NewRef(state->LtE_singleton); case Gt: - Py_INCREF(state->Gt_singleton); - return state->Gt_singleton; + return Py_NewRef(state->Gt_singleton); case GtE: - Py_INCREF(state->GtE_singleton); - return state->GtE_singleton; + return Py_NewRef(state->GtE_singleton); case Is: - Py_INCREF(state->Is_singleton); - return state->Is_singleton; + return Py_NewRef(state->Is_singleton); case IsNot: - Py_INCREF(state->IsNot_singleton); - return state->IsNot_singleton; + return Py_NewRef(state->IsNot_singleton); case In: - Py_INCREF(state->In_singleton); - return state->In_singleton; + return Py_NewRef(state->In_singleton); case NotIn: - Py_INCREF(state->NotIn_singleton); - return state->NotIn_singleton; + return Py_NewRef(state->NotIn_singleton); } Py_UNREACHABLE(); } @@ -5511,8 +5485,7 @@ obj2ast_mod(struct ast_state *state, PyObject* obj, mod_ty* out, PyArena* arena) if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Module' node")) { goto failed; } @@ -5548,8 +5521,7 @@ obj2ast_mod(struct ast_state *state, PyObject* obj, mod_ty* out, PyArena* arena) if (type_ignores == NULL) goto failed; for (i = 0; i < len; i++) { type_ignore_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Module' node")) { goto failed; } @@ -5597,8 +5569,7 @@ obj2ast_mod(struct ast_state *state, PyObject* obj, mod_ty* out, PyArena* arena) if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Interactive' node")) { goto failed; } @@ -5676,8 +5647,7 @@ obj2ast_mod(struct ast_state *state, PyObject* obj, mod_ty* out, PyArena* arena) if (argtypes == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'FunctionType' node")) { goto failed; } @@ -5873,8 +5843,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'FunctionDef' node")) { goto failed; } @@ -5910,8 +5879,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (decorator_list == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'FunctionDef' node")) { goto failed; } @@ -6034,8 +6002,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = 
Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'AsyncFunctionDef' node")) { goto failed; } @@ -6071,8 +6038,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (decorator_list == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'AsyncFunctionDef' node")) { goto failed; } @@ -6178,8 +6144,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (bases == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'ClassDef' node")) { goto failed; } @@ -6215,8 +6180,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (keywords == NULL) goto failed; for (i = 0; i < len; i++) { keyword_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'ClassDef' node")) { goto failed; } @@ -6252,8 +6216,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'ClassDef' node")) { goto failed; } @@ -6289,8 +6252,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (decorator_list == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'ClassDef' node")) { goto failed; } @@ -6370,8 +6332,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (targets == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Delete' node")) { goto failed; } @@ -6422,8 +6383,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (targets == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Assign' node")) { goto failed; } @@ -6694,8 +6654,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'For' node")) { goto failed; } @@ -6731,8 +6690,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (orelse == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'For' node")) { goto failed; } @@ -6836,8 +6794,7 @@ obj2ast_stmt(struct ast_state *state, 
PyObject* obj, stmt_ty* out, PyArena* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'AsyncFor' node")) { goto failed; } @@ -6873,8 +6830,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (orelse == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'AsyncFor' node")) { goto failed; } @@ -6960,8 +6916,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'While' node")) { goto failed; } @@ -6997,8 +6952,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (orelse == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'While' node")) { goto failed; } @@ -7066,8 +7020,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'If' node")) { goto failed; } @@ -7103,8 +7056,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (orelse == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'If' node")) { goto failed; } @@ -7155,8 +7107,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (items == NULL) goto failed; for (i = 0; i < len; i++) { withitem_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'With' node")) { goto failed; } @@ -7192,8 +7143,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'With' node")) { goto failed; } @@ -7261,8 +7211,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (items == NULL) goto failed; for (i = 0; i < len; i++) { withitem_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'AsyncWith' node")) { goto failed; } @@ -7298,8 +7247,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if 
(_Py_EnterRecursiveCall(" while traversing 'AsyncWith' node")) { goto failed; } @@ -7383,8 +7331,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (cases == NULL) goto failed; for (i = 0; i < len; i++) { match_case_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Match' node")) { goto failed; } @@ -7484,8 +7431,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Try' node")) { goto failed; } @@ -7521,8 +7467,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (handlers == NULL) goto failed; for (i = 0; i < len; i++) { excepthandler_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Try' node")) { goto failed; } @@ -7558,8 +7503,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (orelse == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Try' node")) { goto failed; } @@ -7595,8 +7539,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (finalbody == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Try' node")) { goto failed; } @@ -7648,8 +7591,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'TryStar' node")) { goto failed; } @@ -7685,8 +7627,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (handlers == NULL) goto failed; for (i = 0; i < len; i++) { excepthandler_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'TryStar' node")) { goto failed; } @@ -7722,8 +7663,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (orelse == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'TryStar' node")) { goto failed; } @@ -7759,8 +7699,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (finalbody == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'TryStar' node")) { goto failed; } @@ -7857,8 +7796,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (names == NULL) goto 
failed; for (i = 0; i < len; i++) { alias_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Import' node")) { goto failed; } @@ -7926,8 +7864,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (names == NULL) goto failed; for (i = 0; i < len; i++) { alias_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'ImportFrom' node")) { goto failed; } @@ -7993,8 +7930,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (names == NULL) goto failed; for (i = 0; i < len; i++) { identifier val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Global' node")) { goto failed; } @@ -8043,8 +7979,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (names == NULL) goto failed; for (i = 0; i < len; i++) { identifier val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Nonlocal' node")) { goto failed; } @@ -8269,8 +8204,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (values == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'BoolOp' node")) { goto failed; } @@ -8596,8 +8530,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (keys == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Dict' node")) { goto failed; } @@ -8633,8 +8566,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (values == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Dict' node")) { goto failed; } @@ -8683,8 +8615,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (elts == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Set' node")) { goto failed; } @@ -8751,8 +8682,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (generators == NULL) goto failed; for (i = 0; i < len; i++) { comprehension_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'ListComp' node")) { goto failed; } @@ -8819,8 +8749,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (generators == NULL) goto failed; for (i = 0; i < len; i++) { comprehension_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" 
while traversing 'SetComp' node")) { goto failed; } @@ -8905,8 +8834,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (generators == NULL) goto failed; for (i = 0; i < len; i++) { comprehension_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'DictComp' node")) { goto failed; } @@ -8973,8 +8901,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (generators == NULL) goto failed; for (i = 0; i < len; i++) { comprehension_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'GeneratorExp' node")) { goto failed; } @@ -9132,8 +9059,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (ops == NULL) goto failed; for (i = 0; i < len; i++) { cmpop_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Compare' node")) { goto failed; } @@ -9169,8 +9095,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (comparators == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Compare' node")) { goto failed; } @@ -9238,8 +9163,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (args == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Call' node")) { goto failed; } @@ -9275,8 +9199,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (keywords == NULL) goto failed; for (i = 0; i < len; i++) { keyword_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Call' node")) { goto failed; } @@ -9392,8 +9315,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (values == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'JoinedStr' node")) { goto failed; } @@ -9719,8 +9641,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (elts == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'List' node")) { goto failed; } @@ -9787,8 +9708,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (elts == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Tuple' node")) { goto failed; } @@ -10274,8 +10194,7 @@ obj2ast_comprehension(struct ast_state *state, PyObject* obj, comprehension_ty* if (ifs == NULL) goto failed; for 
(i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'comprehension' node")) { goto failed; } @@ -10464,8 +10383,7 @@ obj2ast_excepthandler(struct ast_state *state, PyObject* obj, excepthandler_ty* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'ExceptHandler' node")) { goto failed; } @@ -10526,8 +10444,7 @@ obj2ast_arguments(struct ast_state *state, PyObject* obj, arguments_ty* out, if (posonlyargs == NULL) goto failed; for (i = 0; i < len; i++) { arg_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'arguments' node")) { goto failed; } @@ -10563,8 +10480,7 @@ obj2ast_arguments(struct ast_state *state, PyObject* obj, arguments_ty* out, if (args == NULL) goto failed; for (i = 0; i < len; i++) { arg_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'arguments' node")) { goto failed; } @@ -10617,8 +10533,7 @@ obj2ast_arguments(struct ast_state *state, PyObject* obj, arguments_ty* out, if (kwonlyargs == NULL) goto failed; for (i = 0; i < len; i++) { arg_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'arguments' node")) { goto failed; } @@ -10654,8 +10569,7 @@ obj2ast_arguments(struct ast_state *state, PyObject* obj, arguments_ty* out, if (kw_defaults == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'arguments' node")) { goto failed; } @@ -10708,8 +10622,7 @@ obj2ast_arguments(struct ast_state *state, PyObject* obj, arguments_ty* out, if (defaults == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'arguments' node")) { goto failed; } @@ -11228,8 +11141,7 @@ obj2ast_match_case(struct ast_state *state, PyObject* obj, match_case_ty* out, if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'match_case' node")) { goto failed; } @@ -11425,8 +11337,7 @@ obj2ast_pattern(struct ast_state *state, PyObject* obj, pattern_ty* out, if (patterns == NULL) goto failed; for (i = 0; i < len; i++) { pattern_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'MatchSequence' node")) { goto failed; } @@ -11477,8 +11388,7 @@ obj2ast_pattern(struct ast_state *state, PyObject* obj, pattern_ty* out, if (keys == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, 
i)); if (_Py_EnterRecursiveCall(" while traversing 'MatchMapping' node")) { goto failed; } @@ -11514,8 +11424,7 @@ obj2ast_pattern(struct ast_state *state, PyObject* obj, pattern_ty* out, if (patterns == NULL) goto failed; for (i = 0; i < len; i++) { pattern_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'MatchMapping' node")) { goto failed; } @@ -11601,8 +11510,7 @@ obj2ast_pattern(struct ast_state *state, PyObject* obj, pattern_ty* out, if (patterns == NULL) goto failed; for (i = 0; i < len; i++) { pattern_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'MatchClass' node")) { goto failed; } @@ -11638,8 +11546,7 @@ obj2ast_pattern(struct ast_state *state, PyObject* obj, pattern_ty* out, if (kwd_attrs == NULL) goto failed; for (i = 0; i < len; i++) { identifier val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'MatchClass' node")) { goto failed; } @@ -11675,8 +11582,7 @@ obj2ast_pattern(struct ast_state *state, PyObject* obj, pattern_ty* out, if (kwd_patterns == NULL) goto failed; for (i = 0; i < len; i++) { pattern_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'MatchClass' node")) { goto failed; } @@ -11804,8 +11710,7 @@ obj2ast_pattern(struct ast_state *state, PyObject* obj, pattern_ty* out, if (patterns == NULL) goto failed; for (i = 0; i < len; i++) { pattern_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'MatchOr' node")) { goto failed; } diff --git a/Python/_warnings.c b/Python/_warnings.c index b46fbdca9db38c..046c37eb49bac0 100644 --- a/Python/_warnings.c +++ b/Python/_warnings.c @@ -382,8 +382,7 @@ get_filter(PyInterpreterState *interp, PyObject *category, action = get_default_action(interp); if (action != NULL) { - Py_INCREF(Py_None); - *item = Py_None; + *item = Py_NewRef(Py_None); return action; } @@ -468,8 +467,7 @@ normalize_module(PyObject *filename) module = PyUnicode_Substring(filename, 0, len-3); } else { - module = filename; - Py_INCREF(module); + module = Py_NewRef(filename); } return module; } @@ -751,8 +749,7 @@ warn_explicit(PyThreadState *tstate, PyObject *category, PyObject *message, goto cleanup; return_none: - result = Py_None; - Py_INCREF(result); + result = Py_NewRef(Py_None); cleanup: Py_XDECREF(item); @@ -804,8 +801,7 @@ next_external_frame(PyFrameObject *frame) { do { PyFrameObject *back = PyFrame_GetBack(frame); - Py_DECREF(frame); - frame = back; + Py_SETREF(frame, back); } while (frame != NULL && is_internal_frame(frame)); return frame; @@ -831,8 +827,7 @@ setup_context(Py_ssize_t stack_level, PyObject **filename, int *lineno, if (stack_level <= 0 || is_internal_frame(f)) { while (--stack_level > 0 && f != NULL) { PyFrameObject *back = PyFrame_GetBack(f); - Py_DECREF(f); - f = back; + Py_SETREF(f, back); } } else { @@ -848,8 +843,7 @@ setup_context(Py_ssize_t stack_level, PyObject **filename, int *lineno, } else { globals = f->f_frame->f_globals; - *filename = f->f_frame->f_code->co_filename; - Py_INCREF(*filename); + *filename = 
Py_NewRef(f->f_frame->f_code->co_filename); *lineno = PyFrame_GetLineNumber(f); Py_DECREF(f); } diff --git a/Python/adaptive.md b/Python/adaptive.md index e8161bcdd5b9c6..d978c089b237e0 100644 --- a/Python/adaptive.md +++ b/Python/adaptive.md @@ -11,7 +11,7 @@ A family of instructions has the following fundamental properties: generated by the bytecode compiler. * It has a single adaptive instruction that records an execution count and, at regular intervals, attempts to specialize itself. If not specializing, - it executes the non-adaptive instruction. + it executes the base implementation. * It has at least one specialized form of the instruction that is tailored for a particular value or set of values at runtime. * All members of the family must have the same number of inline cache entries, @@ -22,19 +22,18 @@ A family of instructions has the following fundamental properties: The current implementation also requires the following, although these are not fundamental and may change: -* All families uses one or more inline cache entries, +* All families use one or more inline cache entries, the first entry is always the counter. -* All instruction names should start with the name of the non-adaptive +* All instruction names should start with the name of the adaptive instruction. -* The adaptive instruction should end in `_ADAPTIVE`. * Specialized forms should have names describing their specialization. ## Example family -The `LOAD_GLOBAL` instruction (in Python/ceval.c) already has an adaptive +The `LOAD_GLOBAL` instruction (in Python/bytecodes.c) already has an adaptive family that serves as a relatively simple example. -The `LOAD_GLOBAL_ADAPTIVE` instruction performs adaptive specialization, +The `LOAD_GLOBAL` instruction performs adaptive specialization, calling `_Py_Specialize_LoadGlobal()` when the counter reaches zero. There are two specialized instructions in the family, `LOAD_GLOBAL_MODULE` @@ -138,5 +137,5 @@ to eliminate the branches. Finally, take care that stats are gather correctly. After the last `DEOPT_IF` has passed, a hit should be recorded with `STAT_INC(BASE_INSTRUCTION, hit)`. -After a optimization has been deferred in the `ADAPTIVE` form, +After an optimization has been deferred in the adaptive instruction, that should be recorded with `STAT_INC(BASE_INSTRUCTION, deferred)`. diff --git a/Python/ast_opt.c b/Python/ast_opt.c index 426c5341b56fed..1a0b2a05b1c713 100644 --- a/Python/ast_opt.c +++ b/Python/ast_opt.c @@ -533,8 +533,7 @@ make_const_tuple(asdl_expr_seq *elts) for (int i = 0; i < asdl_seq_LEN(elts); i++) { expr_ty e = (expr_ty)asdl_seq_GET(elts, i); PyObject *v = e->v.Constant.value; - Py_INCREF(v); - PyTuple_SET_ITEM(newval, i, v); + PyTuple_SET_ITEM(newval, i, Py_NewRef(v)); } return newval; } diff --git a/Python/ast_unparse.c b/Python/ast_unparse.c index 6565b6b33ebd52..79b2e2f15ba243 100644 --- a/Python/ast_unparse.c +++ b/Python/ast_unparse.c @@ -13,7 +13,7 @@ _Py_DECLARE_STR(open_br, "{"); _Py_DECLARE_STR(dbl_open_br, "{{"); _Py_DECLARE_STR(close_br, "}"); _Py_DECLARE_STR(dbl_close_br, "}}"); -static PyObject *_str_replace_inf; +#define _str_replace_inf _Py_CACHED_OBJECT(str_replace_inf) /* Forward declarations for recursion via helper functions. 
*/ static PyObject * diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index 2809b03d4a936a..ff96c25da5ebc6 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -63,8 +63,7 @@ update_bases(PyObject *bases, PyObject *const *args, Py_ssize_t nargs) } for (j = 0; j < i; j++) { base = args[j]; - PyList_SET_ITEM(new_bases, j, base); - Py_INCREF(base); + PyList_SET_ITEM(new_bases, j, Py_NewRef(base)); } } j = PyList_GET_SIZE(new_bases); @@ -169,9 +168,7 @@ builtin___build_class__(PyObject *self, PyObject *const *args, Py_ssize_t nargs, goto error; } if (winner != meta) { - Py_DECREF(meta); - meta = winner; - Py_INCREF(meta); + Py_SETREF(meta, Py_NewRef(winner)); } } /* else: meta is not a class, so we cannot do the metaclass @@ -221,8 +218,7 @@ builtin___build_class__(PyObject *self, PyObject *const *args, Py_ssize_t nargs, "__class__ set to %.200R defining %.200R as %.200R"; PyErr_Format(PyExc_TypeError, msg, cell_cls, name, cls); } - Py_DECREF(cls); - cls = NULL; + Py_SETREF(cls, NULL); goto error; } } @@ -718,7 +714,7 @@ compile as builtin_compile filename: object(converter="PyUnicode_FSDecoder") mode: str flags: int = 0 - dont_inherit: bool(accept={int}) = False + dont_inherit: bool = False optimize: int = -1 * _feature_version as feature_version: int = -1 @@ -741,7 +737,7 @@ static PyObject * builtin_compile_impl(PyObject *module, PyObject *source, PyObject *filename, const char *mode, int flags, int dont_inherit, int optimize, int feature_version) -/*[clinic end generated code: output=b0c09c84f116d3d7 input=40171fb92c1d580d]*/ +/*[clinic end generated code: output=b0c09c84f116d3d7 input=cc78e20e7c7682ba]*/ { PyObject *source_copy; const char *str; @@ -804,8 +800,7 @@ builtin_compile_impl(PyObject *module, PyObject *source, PyObject *filename, goto error; if (is_ast) { if (flags & PyCF_ONLY_AST) { - Py_INCREF(source); - result = source; + result = Py_NewRef(source); } else { PyArena *arena; @@ -1128,8 +1123,7 @@ builtin_getattr(PyObject *self, PyObject *const *args, Py_ssize_t nargs) if (nargs > 2) { if (_PyObject_LookupAttr(v, name, &result) == 0) { PyObject *dflt = args[2]; - Py_INCREF(dflt); - return dflt; + return Py_NewRef(dflt); } } else { @@ -1162,8 +1156,7 @@ builtin_globals_impl(PyObject *module) PyObject *d; d = PyEval_GetGlobals(); - Py_XINCREF(d); - return d; + return Py_XNewRef(d); } @@ -1390,12 +1383,10 @@ map_reduce(mapobject *lz, PyObject *Py_UNUSED(ignored)) Py_ssize_t i; if (args == NULL) return NULL; - Py_INCREF(lz->func); - PyTuple_SET_ITEM(args, 0, lz->func); + PyTuple_SET_ITEM(args, 0, Py_NewRef(lz->func)); for (i = 0; i<numargs; i++){ PyObject *it = PyTuple_GET_ITEM(lz->iters, i); - Py_INCREF(it); - PyTuple_SET_ITEM(args, i+1, it); + PyTuple_SET_ITEM(args, i+1, Py_NewRef(it)); } return Py_BuildValue("ON", Py_TYPE(lz), args); @@ -1486,8 +1477,7 @@ builtin_next(PyObject *self, PyObject *const *args, Py_ssize_t nargs) return NULL; PyErr_Clear(); } - Py_INCREF(def); - return def; + return Py_NewRef(def); } else if (PyErr_Occurred()) { return NULL; } else { @@ -1723,8 +1713,7 @@ builtin_locals_impl(PyObject *module) PyObject *d; d = PyEval_GetLocals(); - Py_XINCREF(d); - return d; + return Py_XNewRef(d); } @@ -1785,8 +1774,7 @@ min_max(PyObject *args, PyObject *kwds, int op) } /* no key function; the value is the item */ else { - val = item; - Py_INCREF(val); + val = Py_NewRef(item); } /* maximum value and item are unset; set them */ @@ -1816,8 +1804,7 @@ min_max(PyObject *args, PyObject *kwds, int op) if (maxval == NULL) { assert(maxitem == NULL); if 
(defaultval != NULL) { - Py_INCREF(defaultval); - maxitem = defaultval; + maxitem = Py_NewRef(defaultval); } else { PyErr_Format(PyExc_ValueError, "%s() arg is an empty sequence", name); @@ -2413,8 +2400,7 @@ builtin_vars(PyObject *self, PyObject *args) if (!PyArg_UnpackTuple(args, "vars", 0, 1, &v)) return NULL; if (v == NULL) { - d = PyEval_GetLocals(); - Py_XINCREF(d); + d = Py_XNewRef(PyEval_GetLocals()); } else { if (_PyObject_LookupAttr(v, &_Py_ID(__dict__), &d) == 0) { @@ -2496,8 +2482,7 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) long i_result = PyLong_AsLongAndOverflow(result, &overflow); /* If this already overflowed, don't even enter the loop. */ if (overflow == 0) { - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); } while(result == NULL) { item = PyIter_Next(iter); @@ -2547,8 +2532,7 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) if (PyFloat_CheckExact(result)) { double f_result = PyFloat_AS_DOUBLE(result); - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); while(result == NULL) { item = PyIter_Next(iter); if (item == NULL) { @@ -2595,8 +2579,7 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) if (item == NULL) { /* error, or end-of-sequence */ if (PyErr_Occurred()) { - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); } break; } @@ -2737,8 +2720,7 @@ zip_new(PyTypeObject *type, PyObject *args, PyObject *kwds) return NULL; } for (i=0 ; i < tuplesize ; i++) { - Py_INCREF(Py_None); - PyTuple_SET_ITEM(result, i, Py_None); + PyTuple_SET_ITEM(result, i, Py_NewRef(Py_None)); } /* create zipobject structure */ diff --git a/Python/bootstrap_hash.c b/Python/bootstrap_hash.c index 3a2a7318086f77..587063ef1ab29a 100644 --- a/Python/bootstrap_hash.c +++ b/Python/bootstrap_hash.c @@ -1,6 +1,7 @@ #include "Python.h" #include "pycore_initconfig.h" #include "pycore_fileutils.h" // _Py_fstat_noraise() +#include "pycore_runtime.h" // _PyRuntime #ifdef MS_WINDOWS # include <windows.h> @@ -263,11 +264,7 @@ py_getentropy(char *buffer, Py_ssize_t size, int raise) #endif /* defined(HAVE_GETENTROPY) && !(defined(__sun) && defined(__SVR4)) */ -static struct { - int fd; - dev_t st_dev; - ino_t st_ino; -} urandom_cache = { -1 }; +#define urandom_cache (_PyRuntime.pyhash_state.urandom_cache) /* Read random bytes from the /dev/urandom device: @@ -402,6 +399,9 @@ dev_urandom_close(void) urandom_cache.fd = -1; } } + +#undef urandom_cache + #endif /* !MS_WINDOWS */ diff --git a/Python/bytecodes.c b/Python/bytecodes.c new file mode 100644 index 00000000000000..1331312ef75dab --- /dev/null +++ b/Python/bytecodes.c @@ -0,0 +1,3694 @@ +// This file contains instruction definitions. +// It is read by Tools/cases_generator/generate_cases.py +// to generate Python/generated_cases.c.h. +// Note that there is some dummy C code at the top and bottom of the file +// to fool text editors like VS Code into believing this is valid C code. +// The actual instruction definitions start at // BEGIN BYTECODES //. +// See Tools/cases_generator/README.md for more information. 
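For orientation, here is a rough sketch of the kind of C case the generator is expected to emit from a declaration such as `inst(LOAD_FAST, (-- value))` below; the `TARGET()`/`DISPATCH()` macros and the exact stack bookkeeping are assumptions modelled on ceval.c, not the verbatim contents of Python/generated_cases.c.h.

    /* Hypothetical generator output for: inst(LOAD_FAST, (-- value)) { ... }
       TARGET()/DISPATCH() and the stack-pointer adjustments are assumptions
       based on ceval.c; the real generated_cases.c.h may differ. */
    TARGET(LOAD_FAST) {
        PyObject *value;
        value = GETLOCAL(oparg);      /* body copied from the DSL block */
        assert(value != NULL);
        Py_INCREF(value);
        STACK_GROW(1);                /* net effect of "(-- value)" */
        POKE(1, value);               /* store the declared output */
        DISPATCH();
    }
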
+ +#include "Python.h" +#include "pycore_abstract.h" // _PyIndex_Check() +#include "pycore_call.h" // _PyObject_FastCallDictTstate() +#include "pycore_ceval.h" // _PyEval_SignalAsyncExc() +#include "pycore_code.h" +#include "pycore_function.h" +#include "pycore_long.h" // _PyLong_GetZero() +#include "pycore_object.h" // _PyObject_GC_TRACK() +#include "pycore_moduleobject.h" // PyModuleObject +#include "pycore_opcode.h" // EXTRA_CASES +#include "pycore_pyerrors.h" // _PyErr_Fetch() +#include "pycore_pymem.h" // _PyMem_IsPtrFreed() +#include "pycore_pystate.h" // _PyInterpreterState_GET() +#include "pycore_range.h" // _PyRangeIterObject +#include "pycore_sliceobject.h" // _PyBuildSlice_ConsumeRefs +#include "pycore_sysmodule.h" // _PySys_Audit() +#include "pycore_tuple.h" // _PyTuple_ITEMS() +#include "pycore_emscripten_signal.h" // _Py_CHECK_EMSCRIPTEN_SIGNALS + +#include "pycore_dict.h" +#include "dictobject.h" +#include "pycore_frame.h" +#include "opcode.h" +#include "pydtrace.h" +#include "setobject.h" +#include "structmember.h" // struct PyMemberDef, T_OFFSET_EX + +void _PyFloat_ExactDealloc(PyObject *); +void _PyUnicode_ExactDealloc(PyObject *); + +/* Stack effect macros + * These will be mostly replaced by stack effect descriptions, + * but the tooling need to recognize them. + */ +#define SET_TOP(v) (stack_pointer[-1] = (v)) +#define SET_SECOND(v) (stack_pointer[-2] = (v)) +#define PEEK(n) (stack_pointer[-(n)]) +#define POKE(n, v) (stack_pointer[-(n)] = (v)) +#define PUSH(val) (*(stack_pointer++) = (val)) +#define POP() (*(--stack_pointer)) +#define TOP() PEEK(1) +#define SECOND() PEEK(2) +#define STACK_GROW(n) (stack_pointer += (n)) +#define STACK_SHRINK(n) (stack_pointer -= (n)) +#define EMPTY() 1 +#define STACK_LEVEL() 2 + +/* Local variable macros */ +#define GETLOCAL(i) (frame->localsplus[i]) +#define SETLOCAL(i, val) \ +do { \ + PyObject *_tmp = frame->localsplus[i]; \ + frame->localsplus[i] = (val); \ + Py_XDECREF(_tmp); \ +} while (0) + +/* Flow control macros */ +#define DEOPT_IF(cond, instname) ((void)0) +#define ERROR_IF(cond, labelname) ((void)0) +#define JUMPBY(offset) ((void)0) +#define GO_TO_INSTRUCTION(instname) ((void)0) +#define DISPATCH_SAME_OPARG() ((void)0) + +#define inst(name, ...) case name: +#define op(name, ...) /* NAME is ignored */ +#define macro(name) static int MACRO_##name +#define super(name) static int SUPER_##name +#define family(name, ...) static int family_##name + +#define NAME_ERROR_MSG \ + "name '%.200s' is not defined" + +// Dummy variables for stack effects. 
+static PyObject *value, *value1, *value2, *left, *right, *res, *sum, *prod, *sub; +static PyObject *container, *start, *stop, *v, *lhs, *rhs; +static PyObject *list, *tuple, *dict, *owner; +static PyObject *exit_func, *lasti, *val, *retval, *obj, *iter; +static size_t jump; +// Dummy variables for cache effects +static _Py_CODEUNIT when_to_jump_mask, invert, counter, index, hint; +static uint32_t type_version; +// Dummy opcode names for 'op' opcodes +#define _COMPARE_OP_FLOAT 1003 +#define _COMPARE_OP_INT 1004 +#define _COMPARE_OP_STR 1005 +#define _JUMP_IF 1006 + +static PyObject * +dummy_func( + PyThreadState *tstate, + _PyInterpreterFrame *frame, + unsigned char opcode, + unsigned int oparg, + _Py_atomic_int * const eval_breaker, + _PyCFrame cframe, + PyObject *names, + PyObject *consts, + _Py_CODEUNIT *next_instr, + PyObject **stack_pointer, + PyObject *kwnames, + int throwflag, + binaryfunc binary_ops[] +) +{ + _PyInterpreterFrame entry_frame; + + switch (opcode) { + +// BEGIN BYTECODES // + inst(NOP, (--)) { + } + + inst(RESUME, (--)) { + assert(tstate->cframe == &cframe); + assert(frame == cframe.current_frame); + if (_Py_atomic_load_relaxed_int32(eval_breaker) && oparg < 2) { + goto handle_eval_breaker; + } + } + + inst(LOAD_CLOSURE, (-- value)) { + /* We keep LOAD_CLOSURE so that the bytecode stays more readable. */ + value = GETLOCAL(oparg); + ERROR_IF(value == NULL, unbound_local_error); + Py_INCREF(value); + } + + inst(LOAD_FAST_CHECK, (-- value)) { + value = GETLOCAL(oparg); + ERROR_IF(value == NULL, unbound_local_error); + Py_INCREF(value); + } + + inst(LOAD_FAST, (-- value)) { + value = GETLOCAL(oparg); + assert(value != NULL); + Py_INCREF(value); + } + + inst(LOAD_FAST_R, (-- value)) { + value = GETLOCAL(oparg); + assert(value != NULL); + Py_INCREF(value); + } + + inst(LOAD_CONST, (-- value)) { + value = GETITEM(consts, oparg); + Py_INCREF(value); + } + + inst(STORE_FAST, (value --)) { + SETLOCAL(oparg, value); + } + + inst(STORE_FAST_R, (value --)) { + SETLOCAL(oparg, value); + } + + super(LOAD_FAST__LOAD_FAST) = LOAD_FAST + LOAD_FAST; + super(LOAD_FAST__LOAD_CONST) = LOAD_FAST + LOAD_CONST; + super(STORE_FAST__LOAD_FAST) = STORE_FAST + LOAD_FAST; + super(STORE_FAST__STORE_FAST) = STORE_FAST + STORE_FAST; + super(LOAD_CONST__LOAD_FAST) = LOAD_CONST + LOAD_FAST; + + inst(POP_TOP, (value --)) { + DECREF_INPUTS(); + } + + inst(PUSH_NULL, (-- res)) { + res = NULL; + } + + macro(END_FOR) = POP_TOP + POP_TOP; + + inst(UNARY_POSITIVE, (value -- res)) { + res = PyNumber_Positive(value); + DECREF_INPUTS(); + ERROR_IF(res == NULL, error); + } + + register inst(UNARY_POSITIVE_R, (value -- res)) { + assert(value != NULL); + res = PyNumber_Positive(value); + ERROR_IF(res == NULL, error); + } + + inst(UNARY_NEGATIVE, (value -- res)) { + res = PyNumber_Negative(value); + DECREF_INPUTS(); + ERROR_IF(res == NULL, error); + } + + inst(UNARY_NEGATIVE_R, (--)) { + PyObject *value = REG(oparg1); + assert(value != NULL); + PyObject *res = PyNumber_Negative(value); + Py_XSETREF(REG(oparg2), res); + ERROR_IF(res == NULL, error); + } + + inst(UNARY_NOT, (value -- res)) { + int err = PyObject_IsTrue(value); + DECREF_INPUTS(); + ERROR_IF(err < 0, error); + if (err == 0) { + res = Py_True; + } + else { + res = Py_False; + } + Py_INCREF(res); + } + + inst(UNARY_NOT_R, (--)) { + PyObject *value = REG(oparg1); + assert(value != NULL); + int err = PyObject_IsTrue(value); + ERROR_IF(err < 0, error); + PyObject *res; + if (err == 0) { + res = Py_True; + } + else { + res = Py_False; + } + Py_INCREF(res); + 
Py_XSETREF(REG(oparg2), res); + } + + inst(UNARY_INVERT, (value -- res)) { + res = PyNumber_Invert(value); + DECREF_INPUTS(); + ERROR_IF(res == NULL, error); + } + + inst(UNARY_INVERT_R, (--)) { + PyObject *value = REG(oparg1); + assert(value != NULL); + PyObject *res = PyNumber_Invert(value); + ERROR_IF(res == NULL, error); + Py_XSETREF(REG(oparg2), res); + } + + family(binary_op, INLINE_CACHE_ENTRIES_BINARY_OP) = { + BINARY_OP, + BINARY_OP_ADD_FLOAT, + BINARY_OP_ADD_INT, + BINARY_OP_ADD_UNICODE, + // BINARY_OP_INPLACE_ADD_UNICODE, // This is an odd duck. + BINARY_OP_MULTIPLY_FLOAT, + BINARY_OP_MULTIPLY_INT, + BINARY_OP_SUBTRACT_FLOAT, + BINARY_OP_SUBTRACT_INT, + }; + + + inst(BINARY_OP_MULTIPLY_INT, (unused/1, left, right -- prod)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); + DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); + STAT_INC(BINARY_OP, hit); + prod = _PyLong_Multiply((PyLongObject *)left, (PyLongObject *)right); + _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); + _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); + ERROR_IF(prod == NULL, error); + } + + inst(BINARY_OP_MULTIPLY_FLOAT, (unused/1, left, right -- prod)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); + DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); + STAT_INC(BINARY_OP, hit); + double dprod = ((PyFloatObject *)left)->ob_fval * + ((PyFloatObject *)right)->ob_fval; + prod = PyFloat_FromDouble(dprod); + _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); + _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); + ERROR_IF(prod == NULL, error); + } + + inst(BINARY_OP_SUBTRACT_INT, (unused/1, left, right -- sub)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); + DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); + STAT_INC(BINARY_OP, hit); + sub = _PyLong_Subtract((PyLongObject *)left, (PyLongObject *)right); + _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); + _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); + ERROR_IF(sub == NULL, error); + } + + inst(BINARY_OP_SUBTRACT_FLOAT, (unused/1, left, right -- sub)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); + DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); + STAT_INC(BINARY_OP, hit); + double dsub = ((PyFloatObject *)left)->ob_fval - ((PyFloatObject *)right)->ob_fval; + sub = PyFloat_FromDouble(dsub); + _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); + _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); + ERROR_IF(sub == NULL, error); + } + + inst(BINARY_OP_ADD_UNICODE, (unused/1, left, right -- res)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); + DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); + STAT_INC(BINARY_OP, hit); + res = PyUnicode_Concat(left, right); + _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc); + _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); + ERROR_IF(res == NULL, error); + } + + // This is a subtle one. It's a super-instruction for + // BINARY_OP_ADD_UNICODE followed by STORE_FAST + // where the store goes into the left argument. + // So the inputs are the same as for all BINARY_OP + // specializations, but there is no output. + // At the end we just skip over the STORE_FAST. 
+ inst(BINARY_OP_INPLACE_ADD_UNICODE, (left, right --)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); + DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); + _Py_CODEUNIT true_next = next_instr[INLINE_CACHE_ENTRIES_BINARY_OP]; + assert(_Py_OPCODE(true_next) == STORE_FAST || + _Py_OPCODE(true_next) == STORE_FAST__LOAD_FAST); + PyObject **target_local = &GETLOCAL(_Py_OPARG(true_next)); + DEOPT_IF(*target_local != left, BINARY_OP); + STAT_INC(BINARY_OP, hit); + /* Handle `left = left + right` or `left += right` for str. + * + * When possible, extend `left` in place rather than + * allocating a new PyUnicodeObject. This attempts to avoid + * quadratic behavior when one neglects to use str.join(). + * + * If `left` has only two references remaining (one from + * the stack, one in the locals), DECREFing `left` leaves + * only the locals reference, so PyUnicode_Append knows + * that the string is safe to mutate. + */ + assert(Py_REFCNT(left) >= 2); + _Py_DECREF_NO_DEALLOC(left); + PyUnicode_Append(target_local, right); + _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); + ERROR_IF(*target_local == NULL, error); + // The STORE_FAST is already done. + JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP + 1); + } + + inst(BINARY_OP_ADD_FLOAT, (unused/1, left, right -- sum)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); + DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); + STAT_INC(BINARY_OP, hit); + double dsum = ((PyFloatObject *)left)->ob_fval + + ((PyFloatObject *)right)->ob_fval; + sum = PyFloat_FromDouble(dsum); + _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); + _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); + ERROR_IF(sum == NULL, error); + } + + inst(BINARY_OP_ADD_INT, (unused/1, left, right -- sum)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); + DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); + STAT_INC(BINARY_OP, hit); + sum = _PyLong_Add((PyLongObject *)left, (PyLongObject *)right); + _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); + _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); + ERROR_IF(sum == NULL, error); + } + + family(binary_subscr, INLINE_CACHE_ENTRIES_BINARY_SUBSCR) = { + BINARY_SUBSCR, + BINARY_SUBSCR_DICT, + BINARY_SUBSCR_GETITEM, + BINARY_SUBSCR_LIST_INT, + BINARY_SUBSCR_TUPLE_INT, + }; + + inst(BINARY_SUBSCR, (unused/4, container, sub -- res)) { + _PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + next_instr -= OPSIZE; + _Py_Specialize_BinarySubscr(container, sub, next_instr); + DISPATCH_SAME_OPARG(); + } + STAT_INC(BINARY_SUBSCR, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + res = PyObject_GetItem(container, sub); + DECREF_INPUTS(); + ERROR_IF(res == NULL, error); + } + + inst(BINARY_SLICE, (container, start, stop -- res)) { + PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); + // Can't use ERROR_IF() here, because we haven't + // DECREF'ed container yet, and we still own slice. 
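+        // Instead, record the failure in `res` and fall through to the
+        // common cleanup and the ERROR_IF below.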
+ if (slice == NULL) { + res = NULL; + } + else { + res = PyObject_GetItem(container, slice); + Py_DECREF(slice); + } + Py_DECREF(container); + ERROR_IF(res == NULL, error); + } + + inst(STORE_SLICE, (v, container, start, stop -- )) { + PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); + int err; + if (slice == NULL) { + err = 1; + } + else { + err = PyObject_SetItem(container, slice, v); + Py_DECREF(slice); + } + Py_DECREF(v); + Py_DECREF(container); + ERROR_IF(err, error); + } + + inst(BINARY_SUBSCR_LIST_INT, (unused/4, list, sub -- res)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); + DEOPT_IF(!PyList_CheckExact(list), BINARY_SUBSCR); + + // Deopt unless 0 <= sub < PyList_Size(list) + Py_ssize_t signed_magnitude = Py_SIZE(sub); + DEOPT_IF(((size_t)signed_magnitude) > 1, BINARY_SUBSCR); + assert(((PyLongObject *)_PyLong_GetZero())->ob_digit[0] == 0); + Py_ssize_t index = ((PyLongObject*)sub)->ob_digit[0]; + DEOPT_IF(index >= PyList_GET_SIZE(list), BINARY_SUBSCR); + STAT_INC(BINARY_SUBSCR, hit); + res = PyList_GET_ITEM(list, index); + assert(res != NULL); + Py_INCREF(res); + _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); + Py_DECREF(list); + } + + inst(BINARY_SUBSCR_TUPLE_INT, (unused/4, tuple, sub -- res)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); + DEOPT_IF(!PyTuple_CheckExact(tuple), BINARY_SUBSCR); + + // Deopt unless 0 <= sub < PyTuple_Size(list) + Py_ssize_t signed_magnitude = Py_SIZE(sub); + DEOPT_IF(((size_t)signed_magnitude) > 1, BINARY_SUBSCR); + assert(((PyLongObject *)_PyLong_GetZero())->ob_digit[0] == 0); + Py_ssize_t index = ((PyLongObject*)sub)->ob_digit[0]; + DEOPT_IF(index >= PyTuple_GET_SIZE(tuple), BINARY_SUBSCR); + STAT_INC(BINARY_SUBSCR, hit); + res = PyTuple_GET_ITEM(tuple, index); + assert(res != NULL); + Py_INCREF(res); + _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); + Py_DECREF(tuple); + } + + inst(BINARY_SUBSCR_DICT, (unused/4, dict, sub -- res)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyDict_CheckExact(dict), BINARY_SUBSCR); + STAT_INC(BINARY_SUBSCR, hit); + res = PyDict_GetItemWithError(dict, sub); + if (res == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_SetKeyError(sub); + } + Py_DECREF(dict); + Py_DECREF(sub); + ERROR_IF(true, error); + } + Py_INCREF(res); // Do this before DECREF'ing dict, sub + DECREF_INPUTS(); + } + + inst(BINARY_SUBSCR_GETITEM, (unused/1, type_version/2, func_version/1, container, sub -- unused)) { + PyTypeObject *tp = Py_TYPE(container); + DEOPT_IF(tp->tp_version_tag != type_version, BINARY_SUBSCR); + assert(tp->tp_flags & Py_TPFLAGS_HEAPTYPE); + PyObject *cached = ((PyHeapTypeObject *)tp)->_spec_cache.getitem; + assert(PyFunction_Check(cached)); + PyFunctionObject *getitem = (PyFunctionObject *)cached; + DEOPT_IF(getitem->func_version != func_version, BINARY_SUBSCR); + PyCodeObject *code = (PyCodeObject *)getitem->func_code; + assert(code->co_argcount == 2); + DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), BINARY_SUBSCR); + STAT_INC(BINARY_SUBSCR, hit); + Py_INCREF(getitem); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, getitem); + STACK_SHRINK(2); + new_frame->localsplus[0] = container; + new_frame->localsplus[1] = sub; + for (int i = 2; i < code->co_nlocalsplus; i++) { + new_frame->localsplus[i] = NULL; + } + JUMPBY(INLINE_CACHE_ENTRIES_BINARY_SUBSCR); + DISPATCH_INLINED(new_frame); + } + + // Alternative: (list, unused[oparg], v -- list, unused[oparg]) + 
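+    // LIST_APPEND is used by list comprehensions: while the loop runs, the
+    // result list sits `oparg` entries below the value being appended, e.g.
+    // the stack for `[x*x for x in it]` is [list, iter, x*x] at this point.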
inst(LIST_APPEND, (v --)) { + PyObject *list = PEEK(oparg + 1); // +1 to account for v staying on stack + ERROR_IF(_PyList_AppendTakeRef((PyListObject *)list, v) < 0, error); + PREDICT(JUMP_BACKWARD); + } + + // Alternative: (set, unused[oparg], v -- set, unused[oparg]) + inst(SET_ADD, (v --)) { + PyObject *set = PEEK(oparg + 1); // +1 to account for v staying on stack + int err = PySet_Add(set, v); + Py_DECREF(v); + ERROR_IF(err, error); + PREDICT(JUMP_BACKWARD); + } + + family(store_subscr) = { + STORE_SUBSCR, + STORE_SUBSCR_DICT, + STORE_SUBSCR_LIST_INT, + }; + + inst(STORE_SUBSCR, (counter/1, v, container, sub -- )) { + if (ADAPTIVE_COUNTER_IS_ZERO(counter)) { + assert(cframe.use_tracing == 0); + next_instr -= OPSIZE; + _Py_Specialize_StoreSubscr(container, sub, next_instr); + DISPATCH_SAME_OPARG(); + } + STAT_INC(STORE_SUBSCR, deferred); + _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr; + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + /* container[sub] = v */ + int err = PyObject_SetItem(container, sub, v); + DECREF_INPUTS(); + ERROR_IF(err, error); + } + + inst(STORE_SUBSCR_LIST_INT, (unused/1, value, list, sub -- )) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(sub), STORE_SUBSCR); + DEOPT_IF(!PyList_CheckExact(list), STORE_SUBSCR); + + // Ensure nonnegative, zero-or-one-digit ints. + DEOPT_IF(((size_t)Py_SIZE(sub)) > 1, STORE_SUBSCR); + Py_ssize_t index = ((PyLongObject*)sub)->ob_digit[0]; + // Ensure index < len(list) + DEOPT_IF(index >= PyList_GET_SIZE(list), STORE_SUBSCR); + STAT_INC(STORE_SUBSCR, hit); + + PyObject *old_value = PyList_GET_ITEM(list, index); + PyList_SET_ITEM(list, index, value); + assert(old_value != NULL); + Py_DECREF(old_value); + _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); + Py_DECREF(list); + } + + inst(STORE_SUBSCR_DICT, (unused/1, value, dict, sub -- )) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyDict_CheckExact(dict), STORE_SUBSCR); + STAT_INC(STORE_SUBSCR, hit); + int err = _PyDict_SetItem_Take2((PyDictObject *)dict, sub, value); + Py_DECREF(dict); + ERROR_IF(err, error); + } + + inst(DELETE_SUBSCR, (container, sub --)) { + /* del container[sub] */ + int err = PyObject_DelItem(container, sub); + DECREF_INPUTS(); + ERROR_IF(err, error); + } + + inst(PRINT_EXPR, (value --)) { + PyObject *hook = _PySys_GetAttr(tstate, &_Py_ID(displayhook)); + PyObject *res; + // Can't use ERROR_IF here. + if (hook == NULL) { + _PyErr_SetString(tstate, PyExc_RuntimeError, + "lost sys.displayhook"); + DECREF_INPUTS(); + ERROR_IF(true, error); + } + res = PyObject_CallOneArg(hook, value); + DECREF_INPUTS(); + ERROR_IF(res == NULL, error); + Py_DECREF(res); + } + + // stack effect: (__array[oparg] -- ) + inst(RAISE_VARARGS) { + PyObject *cause = NULL, *exc = NULL; + switch (oparg) { + case 2: + cause = POP(); /* cause */ + /* fall through */ + case 1: + exc = POP(); /* exc */ + /* fall through */ + case 0: + if (do_raise(tstate, exc, cause)) { + goto exception_unwind; + } + break; + default: + _PyErr_SetString(tstate, PyExc_SystemError, + "bad RAISE_VARARGS oparg"); + break; + } + goto error; + } + + inst(INTERPRETER_EXIT, (retval --)) { + assert(frame == &entry_frame); + assert(_PyFrame_IsIncomplete(frame)); + STACK_SHRINK(1); // Since we're not going to DISPATCH() + assert(EMPTY()); + /* Restore previous cframe and return. 
*/ + tstate->cframe = cframe.previous; + tstate->cframe->use_tracing = cframe.use_tracing; + assert(tstate->cframe->current_frame == frame->previous); + assert(!_PyErr_Occurred(tstate)); + _Py_LeaveRecursiveCallTstate(tstate); + return retval; + } + + inst(RETURN_VALUE, (retval --)) { + STACK_SHRINK(1); + assert(EMPTY()); + _PyFrame_SetStackPointer(frame, stack_pointer); + TRACE_FUNCTION_EXIT(); + DTRACE_FUNCTION_EXIT(); + _Py_LeaveRecursiveCallPy(tstate); + assert(frame != &entry_frame); + // GH-99729: We need to unlink the frame *before* clearing it: + _PyInterpreterFrame *dying = frame; + frame = cframe.current_frame = dying->previous; + _PyEvalFrameClearAndPop(tstate, dying); + _PyFrame_StackPush(frame, retval); + goto resume_frame; + } + + inst(GET_AITER, (obj -- iter)) { + unaryfunc getter = NULL; + PyTypeObject *type = Py_TYPE(obj); + + if (type->tp_as_async != NULL) { + getter = type->tp_as_async->am_aiter; + } + + if (getter == NULL) { + _PyErr_Format(tstate, PyExc_TypeError, + "'async for' requires an object with " + "__aiter__ method, got %.100s", + type->tp_name); + DECREF_INPUTS(); + ERROR_IF(true, error); + } + + iter = (*getter)(obj); + DECREF_INPUTS(); + ERROR_IF(iter == NULL, error); + + if (Py_TYPE(iter)->tp_as_async == NULL || + Py_TYPE(iter)->tp_as_async->am_anext == NULL) { + + _PyErr_Format(tstate, PyExc_TypeError, + "'async for' received an object from __aiter__ " + "that does not implement __anext__: %.100s", + Py_TYPE(iter)->tp_name); + Py_DECREF(iter); + ERROR_IF(true, error); + } + } + + // stack effect: ( -- __0) + inst(GET_ANEXT) { + unaryfunc getter = NULL; + PyObject *next_iter = NULL; + PyObject *awaitable = NULL; + PyObject *aiter = TOP(); + PyTypeObject *type = Py_TYPE(aiter); + + if (PyAsyncGen_CheckExact(aiter)) { + awaitable = type->tp_as_async->am_anext(aiter); + if (awaitable == NULL) { + goto error; + } + } else { + if (type->tp_as_async != NULL){ + getter = type->tp_as_async->am_anext; + } + + if (getter != NULL) { + next_iter = (*getter)(aiter); + if (next_iter == NULL) { + goto error; + } + } + else { + _PyErr_Format(tstate, PyExc_TypeError, + "'async for' requires an iterator with " + "__anext__ method, got %.100s", + type->tp_name); + goto error; + } + + awaitable = _PyCoro_GetAwaitableIter(next_iter); + if (awaitable == NULL) { + _PyErr_FormatFromCause( + PyExc_TypeError, + "'async for' received an invalid object " + "from __anext__: %.100s", + Py_TYPE(next_iter)->tp_name); + + Py_DECREF(next_iter); + goto error; + } else { + Py_DECREF(next_iter); + } + } + + PUSH(awaitable); + PREDICT(LOAD_CONST); + } + + // stack effect: ( -- ) + inst(GET_AWAITABLE) { + PyObject *iterable = TOP(); + PyObject *iter = _PyCoro_GetAwaitableIter(iterable); + + if (iter == NULL) { + format_awaitable_error(tstate, Py_TYPE(iterable), oparg); + } + + Py_DECREF(iterable); + + if (iter != NULL && PyCoro_CheckExact(iter)) { + PyObject *yf = _PyGen_yf((PyGenObject*)iter); + if (yf != NULL) { + /* `iter` is a coroutine object that is being + awaited, `yf` is a pointer to the current awaitable + being awaited on. */ + Py_DECREF(yf); + Py_CLEAR(iter); + _PyErr_SetString(tstate, PyExc_RuntimeError, + "coroutine is being awaited already"); + /* The code below jumps to `error` if `iter` is NULL. 
*/ + } + } + + SET_TOP(iter); /* Even if it's NULL */ + + if (iter == NULL) { + goto error; + } + + PREDICT(LOAD_CONST); + } + + // error: SEND stack effect depends on jump flag + inst(SEND) { + assert(frame != &entry_frame); + assert(STACK_LEVEL() >= 2); + PyObject *v = POP(); + PyObject *receiver = TOP(); + PySendResult gen_status; + PyObject *retval; + if (tstate->c_tracefunc == NULL) { + gen_status = PyIter_Send(receiver, v, &retval); + } else { + if (Py_IsNone(v) && PyIter_Check(receiver)) { + retval = Py_TYPE(receiver)->tp_iternext(receiver); + } + else { + retval = PyObject_CallMethodOneArg(receiver, &_Py_ID(send), v); + } + if (retval == NULL) { + if (tstate->c_tracefunc != NULL + && _PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) + call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj, tstate, frame); + if (_PyGen_FetchStopIterationValue(&retval) == 0) { + gen_status = PYGEN_RETURN; + } + else { + gen_status = PYGEN_ERROR; + } + } + else { + gen_status = PYGEN_NEXT; + } + } + Py_DECREF(v); + if (gen_status == PYGEN_ERROR) { + assert(retval == NULL); + goto error; + } + if (gen_status == PYGEN_RETURN) { + assert(retval != NULL); + Py_DECREF(receiver); + SET_TOP(retval); + JUMPBY(oparg); + } + else { + assert(gen_status == PYGEN_NEXT); + assert(retval != NULL); + PUSH(retval); + } + } + + // stack effect: ( -- ) + inst(ASYNC_GEN_WRAP) { + PyObject *v = TOP(); + assert(frame->f_code->co_flags & CO_ASYNC_GENERATOR); + PyObject *w = _PyAsyncGenValueWrapperNew(v); + if (w == NULL) { + goto error; + } + SET_TOP(w); + Py_DECREF(v); + } + + // stack effect: ( -- ) + inst(YIELD_VALUE) { + // NOTE: It's important that YIELD_VALUE never raises an exception! + // The compiler treats any exception raised here as a failed close() + // or throw() call. 
+ assert(oparg == STACK_LEVEL()); + assert(frame != &entry_frame); + PyObject *retval = POP(); + PyGenObject *gen = _PyFrame_GetGenerator(frame); + gen->gi_frame_state = FRAME_SUSPENDED; + _PyFrame_SetStackPointer(frame, stack_pointer); + TRACE_FUNCTION_EXIT(); + DTRACE_FUNCTION_EXIT(); + tstate->exc_info = gen->gi_exc_state.previous_item; + gen->gi_exc_state.previous_item = NULL; + _Py_LeaveRecursiveCallPy(tstate); + _PyInterpreterFrame *gen_frame = frame; + frame = cframe.current_frame = frame->previous; + gen_frame->previous = NULL; + frame->prev_instr -= frame->yield_offset; + _PyFrame_StackPush(frame, retval); + goto resume_frame; + } + + // stack effect: (__0 -- ) + inst(POP_EXCEPT) { + _PyErr_StackItem *exc_info = tstate->exc_info; + PyObject *value = exc_info->exc_value; + exc_info->exc_value = POP(); + Py_XDECREF(value); + } + + // stack effect: (__0 -- ) + inst(RERAISE) { + if (oparg) { + PyObject *lasti = PEEK(oparg + 1); + if (PyLong_Check(lasti)) { + frame->prev_instr = _PyCode_CODE(frame->f_code) + PyLong_AsLong(lasti); + assert(!_PyErr_Occurred(tstate)); + } + else { + assert(PyLong_Check(lasti)); + _PyErr_SetString(tstate, PyExc_SystemError, "lasti is not an int"); + goto error; + } + } + PyObject *val = POP(); + assert(val && PyExceptionInstance_Check(val)); + PyObject *exc = Py_NewRef(PyExceptionInstance_Class(val)); + PyObject *tb = PyException_GetTraceback(val); + _PyErr_Restore(tstate, exc, val, tb); + goto exception_unwind; + } + + // stack effect: (__0 -- ) + inst(PREP_RERAISE_STAR) { + PyObject *excs = POP(); + assert(PyList_Check(excs)); + PyObject *orig = POP(); + + PyObject *val = _PyExc_PrepReraiseStar(orig, excs); + Py_DECREF(excs); + Py_DECREF(orig); + + if (val == NULL) { + goto error; + } + + PUSH(val); + } + + // stack effect: (__0, __1 -- ) + inst(END_ASYNC_FOR) { + PyObject *val = POP(); + assert(val && PyExceptionInstance_Check(val)); + if (PyErr_GivenExceptionMatches(val, PyExc_StopAsyncIteration)) { + Py_DECREF(val); + Py_DECREF(POP()); + } + else { + PyObject *exc = Py_NewRef(PyExceptionInstance_Class(val)); + PyObject *tb = PyException_GetTraceback(val); + _PyErr_Restore(tstate, exc, val, tb); + goto exception_unwind; + } + } + + // stack effect: (__0, __1 -- ) + inst(CLEANUP_THROW) { + assert(throwflag); + PyObject *exc_value = TOP(); + assert(exc_value && PyExceptionInstance_Check(exc_value)); + if (PyErr_GivenExceptionMatches(exc_value, PyExc_StopIteration)) { + PyObject *value = ((PyStopIterationObject *)exc_value)->value; + Py_INCREF(value); + Py_DECREF(POP()); // The StopIteration. + Py_DECREF(POP()); // The last sent value. + Py_DECREF(POP()); // The delegated sub-iterator. 
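+            // The StopIteration's value becomes the result of the enclosing
+            // `yield from` / `await` expression.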
+ PUSH(value); + } + else { + PyObject *exc_type = Py_NewRef(Py_TYPE(exc_value)); + PyObject *exc_traceback = PyException_GetTraceback(exc_value); + _PyErr_Restore(tstate, exc_type, Py_NewRef(exc_value), exc_traceback); + goto exception_unwind; + } + } + + inst(STOPITERATION_ERROR) { + assert(frame->owner == FRAME_OWNED_BY_GENERATOR); + PyObject *exc = TOP(); + assert(PyExceptionInstance_Check(exc)); + const char *msg = NULL; + if (PyErr_GivenExceptionMatches(exc, PyExc_StopIteration)) { + msg = "generator raised StopIteration"; + if (frame->f_code->co_flags & CO_ASYNC_GENERATOR) { + msg = "async generator raised StopIteration"; + } + else if (frame->f_code->co_flags & CO_COROUTINE) { + msg = "coroutine raised StopIteration"; + } + } + else if ((frame->f_code->co_flags & CO_ASYNC_GENERATOR) && + PyErr_GivenExceptionMatches(exc, PyExc_StopAsyncIteration)) + { + /* code in `gen` raised a StopAsyncIteration error: + raise a RuntimeError. + */ + msg = "async generator raised StopAsyncIteration"; + } + if (msg != NULL) { + PyObject *message = _PyUnicode_FromASCII(msg, strlen(msg)); + if (message == NULL) { + goto error; + } + PyObject *error = PyObject_CallOneArg(PyExc_RuntimeError, message); + if (error == NULL) { + Py_DECREF(message); + goto error; + } + assert(PyExceptionInstance_Check(error)); + SET_TOP(error); + PyException_SetCause(error, Py_NewRef(exc)); + // Steal exc reference, rather than Py_NewRef+Py_DECREF + PyException_SetContext(error, exc); + Py_DECREF(message); + } + } + + + // stack effect: ( -- __0) + inst(LOAD_ASSERTION_ERROR) { + PyObject *value = PyExc_AssertionError; + PUSH(Py_NewRef(value)); + } + + // stack effect: ( -- __0) + inst(LOAD_BUILD_CLASS) { + PyObject *bc; + if (PyDict_CheckExact(BUILTINS())) { + bc = _PyDict_GetItemWithError(BUILTINS(), + &_Py_ID(__build_class__)); + if (bc == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_SetString(tstate, PyExc_NameError, + "__build_class__ not found"); + } + goto error; + } + Py_INCREF(bc); + } + else { + bc = PyObject_GetItem(BUILTINS(), &_Py_ID(__build_class__)); + if (bc == NULL) { + if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) + _PyErr_SetString(tstate, PyExc_NameError, + "__build_class__ not found"); + goto error; + } + } + PUSH(bc); + } + + // stack effect: (__0 -- ) + inst(STORE_NAME) { + PyObject *name = GETITEM(names, oparg); + PyObject *v = POP(); + PyObject *ns = LOCALS(); + int err; + if (ns == NULL) { + _PyErr_Format(tstate, PyExc_SystemError, + "no locals found when storing %R", name); + Py_DECREF(v); + goto error; + } + if (PyDict_CheckExact(ns)) + err = PyDict_SetItem(ns, name, v); + else + err = PyObject_SetItem(ns, name, v); + Py_DECREF(v); + if (err != 0) + goto error; + } + + inst(DELETE_NAME, (--)) { + PyObject *name = GETITEM(names, oparg); + PyObject *ns = LOCALS(); + int err; + if (ns == NULL) { + _PyErr_Format(tstate, PyExc_SystemError, + "no locals when deleting %R", name); + goto error; + } + err = PyObject_DelItem(ns, name); + // Can't use ERROR_IF here. 
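+        // The failure must first be turned into a NameError that names the
+        // variable, so the message is formatted by hand before jumping to
+        // the error label.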
+ if (err != 0) { + format_exc_check_arg(tstate, PyExc_NameError, + NAME_ERROR_MSG, + name); + goto error; + } + } + + // stack effect: (__0 -- __array[oparg]) + inst(UNPACK_SEQUENCE) { + _PyUnpackSequenceCache *cache = (_PyUnpackSequenceCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + PyObject *seq = TOP(); + next_instr -= OPSIZE; + _Py_Specialize_UnpackSequence(seq, next_instr, oparg); + DISPATCH_SAME_OPARG(); + } + STAT_INC(UNPACK_SEQUENCE, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + PyObject *seq = POP(); + PyObject **top = stack_pointer + oparg; + if (!unpack_iterable(tstate, seq, oparg, -1, top)) { + Py_DECREF(seq); + goto error; + } + STACK_GROW(oparg); + Py_DECREF(seq); + JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); + } + + // stack effect: (__0 -- __array[oparg]) + inst(UNPACK_SEQUENCE_TWO_TUPLE) { + PyObject *seq = TOP(); + DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); + DEOPT_IF(PyTuple_GET_SIZE(seq) != 2, UNPACK_SEQUENCE); + STAT_INC(UNPACK_SEQUENCE, hit); + SET_TOP(Py_NewRef(PyTuple_GET_ITEM(seq, 1))); + PUSH(Py_NewRef(PyTuple_GET_ITEM(seq, 0))); + Py_DECREF(seq); + JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); + } + + // stack effect: (__0 -- __array[oparg]) + inst(UNPACK_SEQUENCE_TUPLE) { + PyObject *seq = TOP(); + DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); + DEOPT_IF(PyTuple_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE); + STAT_INC(UNPACK_SEQUENCE, hit); + STACK_SHRINK(1); + PyObject **items = _PyTuple_ITEMS(seq); + while (oparg--) { + PUSH(Py_NewRef(items[oparg])); + } + Py_DECREF(seq); + JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); + } + + // stack effect: (__0 -- __array[oparg]) + inst(UNPACK_SEQUENCE_LIST) { + PyObject *seq = TOP(); + DEOPT_IF(!PyList_CheckExact(seq), UNPACK_SEQUENCE); + DEOPT_IF(PyList_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE); + STAT_INC(UNPACK_SEQUENCE, hit); + STACK_SHRINK(1); + PyObject **items = _PyList_ITEMS(seq); + while (oparg--) { + PUSH(Py_NewRef(items[oparg])); + } + Py_DECREF(seq); + JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); + } + + // error: UNPACK_EX has irregular stack effect + inst(UNPACK_EX) { + int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8); + PyObject *seq = POP(); + PyObject **top = stack_pointer + totalargs; + if (!unpack_iterable(tstate, seq, oparg & 0xFF, oparg >> 8, top)) { + Py_DECREF(seq); + goto error; + } + STACK_GROW(totalargs); + Py_DECREF(seq); + } + + family(store_attr) = { + STORE_ATTR, + STORE_ATTR_INSTANCE_VALUE, + STORE_ATTR_SLOT, + STORE_ATTR_WITH_HINT, + }; + + inst(STORE_ATTR, (counter/1, unused/3, v, owner --)) { + if (ADAPTIVE_COUNTER_IS_ZERO(counter)) { + assert(cframe.use_tracing == 0); + PyObject *name = GETITEM(names, oparg); + next_instr -= OPSIZE; + _Py_Specialize_StoreAttr(owner, next_instr, name); + DISPATCH_SAME_OPARG(); + } + STAT_INC(STORE_ATTR, deferred); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + PyObject *name = GETITEM(names, oparg); + int err = PyObject_SetAttr(owner, name, v); + Py_DECREF(v); + Py_DECREF(owner); + ERROR_IF(err, error); + } + + inst(DELETE_ATTR, (owner --)) { + PyObject *name = GETITEM(names, oparg); + int err = PyObject_SetAttr(owner, name, (PyObject *)NULL); + Py_DECREF(owner); + ERROR_IF(err, error); + } + + inst(STORE_GLOBAL, (v --)) { + PyObject *name = GETITEM(names, oparg); + int err = PyDict_SetItem(GLOBALS(), name, v); + Py_DECREF(v); + ERROR_IF(err, error); + } + + inst(DELETE_GLOBAL, (--)) { + PyObject *name = 
GETITEM(names, oparg); + int err; + err = PyDict_DelItem(GLOBALS(), name); + // Can't use ERROR_IF here. + if (err != 0) { + if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + format_exc_check_arg(tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } + goto error; + } + } + + // stack effect: ( -- __0) + inst(LOAD_NAME) { + PyObject *name = GETITEM(names, oparg); + PyObject *locals = LOCALS(); + PyObject *v; + if (locals == NULL) { + _PyErr_Format(tstate, PyExc_SystemError, + "no locals when loading %R", name); + goto error; + } + if (PyDict_CheckExact(locals)) { + v = PyDict_GetItemWithError(locals, name); + if (v != NULL) { + Py_INCREF(v); + } + else if (_PyErr_Occurred(tstate)) { + goto error; + } + } + else { + v = PyObject_GetItem(locals, name); + if (v == NULL) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) + goto error; + _PyErr_Clear(tstate); + } + } + if (v == NULL) { + v = PyDict_GetItemWithError(GLOBALS(), name); + if (v != NULL) { + Py_INCREF(v); + } + else if (_PyErr_Occurred(tstate)) { + goto error; + } + else { + if (PyDict_CheckExact(BUILTINS())) { + v = PyDict_GetItemWithError(BUILTINS(), name); + if (v == NULL) { + if (!_PyErr_Occurred(tstate)) { + format_exc_check_arg( + tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } + goto error; + } + Py_INCREF(v); + } + else { + v = PyObject_GetItem(BUILTINS(), name); + if (v == NULL) { + if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + format_exc_check_arg( + tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } + goto error; + } + } + } + } + PUSH(v); + } + + // error: LOAD_GLOBAL has irregular stack effect + inst(LOAD_GLOBAL) { + _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + PyObject *name = GETITEM(names, oparg>>1); + next_instr -= OPSIZE; + _Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name); + DISPATCH_SAME_OPARG(); + } + STAT_INC(LOAD_GLOBAL, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + int push_null = oparg & 1; + PEEK(0) = NULL; + PyObject *name = GETITEM(names, oparg>>1); + PyObject *v; + if (PyDict_CheckExact(GLOBALS()) + && PyDict_CheckExact(BUILTINS())) + { + v = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(), + (PyDictObject *)BUILTINS(), + name); + if (v == NULL) { + if (!_PyErr_Occurred(tstate)) { + /* _PyDict_LoadGlobal() returns NULL without raising + * an exception if the key doesn't exist */ + format_exc_check_arg(tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } + goto error; + } + Py_INCREF(v); + } + else { + /* Slow-path if globals or builtins is not a dict */ + + /* namespace 1: globals */ + v = PyObject_GetItem(GLOBALS(), name); + if (v == NULL) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + goto error; + } + _PyErr_Clear(tstate); + + /* namespace 2: builtins */ + v = PyObject_GetItem(BUILTINS(), name); + if (v == NULL) { + if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + format_exc_check_arg( + tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } + goto error; + } + } + } + /* Skip over inline cache */ + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_GLOBAL); + STACK_GROW(push_null); + PUSH(v); + } + + // error: LOAD_GLOBAL has irregular stack effect + inst(LOAD_GLOBAL_MODULE) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL); + PyDictObject *dict = (PyDictObject *)GLOBALS(); + _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; + uint32_t version = 
read_u32(cache->module_keys_version); + DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL); + assert(DK_IS_UNICODE(dict->ma_keys)); + PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(dict->ma_keys); + PyObject *res = entries[cache->index].me_value; + DEOPT_IF(res == NULL, LOAD_GLOBAL); + int push_null = oparg & 1; + PEEK(0) = NULL; + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_GLOBAL); + STAT_INC(LOAD_GLOBAL, hit); + STACK_GROW(push_null+1); + SET_TOP(Py_NewRef(res)); + } + + // error: LOAD_GLOBAL has irregular stack effect + inst(LOAD_GLOBAL_BUILTIN) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL); + DEOPT_IF(!PyDict_CheckExact(BUILTINS()), LOAD_GLOBAL); + PyDictObject *mdict = (PyDictObject *)GLOBALS(); + PyDictObject *bdict = (PyDictObject *)BUILTINS(); + _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; + uint32_t mod_version = read_u32(cache->module_keys_version); + uint16_t bltn_version = cache->builtin_keys_version; + DEOPT_IF(mdict->ma_keys->dk_version != mod_version, LOAD_GLOBAL); + DEOPT_IF(bdict->ma_keys->dk_version != bltn_version, LOAD_GLOBAL); + assert(DK_IS_UNICODE(bdict->ma_keys)); + PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(bdict->ma_keys); + PyObject *res = entries[cache->index].me_value; + DEOPT_IF(res == NULL, LOAD_GLOBAL); + int push_null = oparg & 1; + PEEK(0) = NULL; + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_GLOBAL); + STAT_INC(LOAD_GLOBAL, hit); + STACK_GROW(push_null+1); + SET_TOP(Py_NewRef(res)); + } + + inst(DELETE_FAST, (--)) { + PyObject *v = GETLOCAL(oparg); + ERROR_IF(v == NULL, unbound_local_error); + SETLOCAL(oparg, NULL); + } + + inst(MAKE_CELL, (--)) { + // "initial" is probably NULL but not if it's an arg (or set + // via PyFrame_LocalsToFast() before MAKE_CELL has run). + PyObject *initial = GETLOCAL(oparg); + PyObject *cell = PyCell_New(initial); + if (cell == NULL) { + goto resume_with_error; + } + SETLOCAL(oparg, cell); + } + + inst(DELETE_DEREF, (--)) { + PyObject *cell = GETLOCAL(oparg); + PyObject *oldobj = PyCell_GET(cell); + // Can't use ERROR_IF here. + // Fortunately we don't need its superpower. 
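+        // (Nothing has been pushed yet, so formatting the message and
+        // jumping straight to the error label is sufficient.)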
+ if (oldobj == NULL) { + format_exc_unbound(tstate, frame->f_code, oparg); + goto error; + } + PyCell_SET(cell, NULL); + Py_DECREF(oldobj); + } + + // stack effect: ( -- __0) + inst(LOAD_CLASSDEREF) { + PyObject *name, *value, *locals = LOCALS(); + assert(locals); + assert(oparg >= 0 && oparg < frame->f_code->co_nlocalsplus); + name = PyTuple_GET_ITEM(frame->f_code->co_localsplusnames, oparg); + if (PyDict_CheckExact(locals)) { + value = PyDict_GetItemWithError(locals, name); + if (value != NULL) { + Py_INCREF(value); + } + else if (_PyErr_Occurred(tstate)) { + goto error; + } + } + else { + value = PyObject_GetItem(locals, name); + if (value == NULL) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + goto error; + } + _PyErr_Clear(tstate); + } + } + if (!value) { + PyObject *cell = GETLOCAL(oparg); + value = PyCell_GET(cell); + if (value == NULL) { + format_exc_unbound(tstate, frame->f_code, oparg); + goto error; + } + Py_INCREF(value); + } + PUSH(value); + } + + // stack effect: ( -- __0) + inst(LOAD_DEREF) { + PyObject *cell = GETLOCAL(oparg); + PyObject *value = PyCell_GET(cell); + if (value == NULL) { + format_exc_unbound(tstate, frame->f_code, oparg); + goto error; + } + PUSH(Py_NewRef(value)); + } + + // stack effect: (__0 -- ) + inst(STORE_DEREF) { + PyObject *v = POP(); + PyObject *cell = GETLOCAL(oparg); + PyObject *oldobj = PyCell_GET(cell); + PyCell_SET(cell, v); + Py_XDECREF(oldobj); + } + + // stack effect: ( -- ) + inst(COPY_FREE_VARS) { + /* Copy closure variables to free variables */ + PyCodeObject *co = frame->f_code; + assert(PyFunction_Check(frame->f_funcobj)); + PyObject *closure = ((PyFunctionObject *)frame->f_funcobj)->func_closure; + int offset = co->co_nlocals + co->co_nplaincellvars; + assert(oparg == co->co_nfreevars); + for (int i = 0; i < oparg; ++i) { + PyObject *o = PyTuple_GET_ITEM(closure, i); + frame->localsplus[offset + i] = Py_NewRef(o); + } + } + + // stack effect: (__array[oparg] -- __0) + inst(BUILD_STRING) { + PyObject *str; + str = _PyUnicode_JoinArray(&_Py_STR(empty), + stack_pointer - oparg, oparg); + if (str == NULL) + goto error; + while (--oparg >= 0) { + PyObject *item = POP(); + Py_DECREF(item); + } + PUSH(str); + } + + // stack effect: (__array[oparg] -- __0) + inst(BUILD_TUPLE) { + STACK_SHRINK(oparg); + PyObject *tup = _PyTuple_FromArraySteal(stack_pointer, oparg); + if (tup == NULL) + goto error; + PUSH(tup); + } + + // stack effect: (__array[oparg] -- __0) + inst(BUILD_LIST) { + PyObject *list = PyList_New(oparg); + if (list == NULL) + goto error; + while (--oparg >= 0) { + PyObject *item = POP(); + PyList_SET_ITEM(list, oparg, item); + } + PUSH(list); + } + + // stack effect: ( -- ) + inst(LIST_TO_TUPLE) { + PyObject *list = POP(); + PyObject *tuple = PyList_AsTuple(list); + Py_DECREF(list); + if (tuple == NULL) { + goto error; + } + PUSH(tuple); + } + + // stack effect: (__0 -- ) + inst(LIST_EXTEND) { + PyObject *iterable = POP(); + PyObject *list = PEEK(oparg); + PyObject *none_val = _PyList_Extend((PyListObject *)list, iterable); + if (none_val == NULL) { + if (_PyErr_ExceptionMatches(tstate, PyExc_TypeError) && + (Py_TYPE(iterable)->tp_iter == NULL && !PySequence_Check(iterable))) + { + _PyErr_Clear(tstate); + _PyErr_Format(tstate, PyExc_TypeError, + "Value after * must be an iterable, not %.200s", + Py_TYPE(iterable)->tp_name); + } + Py_DECREF(iterable); + goto error; + } + Py_DECREF(none_val); + Py_DECREF(iterable); + } + + // stack effect: (__0 -- ) + inst(SET_UPDATE) { + PyObject *iterable = POP(); + PyObject *set = 
PEEK(oparg); + int err = _PySet_Update(set, iterable); + Py_DECREF(iterable); + if (err < 0) { + goto error; + } + } + + // stack effect: (__array[oparg] -- __0) + inst(BUILD_SET) { + PyObject *set = PySet_New(NULL); + int err = 0; + int i; + if (set == NULL) + goto error; + for (i = oparg; i > 0; i--) { + PyObject *item = PEEK(i); + if (err == 0) + err = PySet_Add(set, item); + Py_DECREF(item); + } + STACK_SHRINK(oparg); + if (err != 0) { + Py_DECREF(set); + goto error; + } + PUSH(set); + } + + // stack effect: (__array[oparg*2] -- __0) + inst(BUILD_MAP) { + PyObject *map = _PyDict_FromItems( + &PEEK(2*oparg), 2, + &PEEK(2*oparg - 1), 2, + oparg); + if (map == NULL) + goto error; + + while (oparg--) { + Py_DECREF(POP()); + Py_DECREF(POP()); + } + PUSH(map); + } + + // stack effect: ( -- ) + inst(SETUP_ANNOTATIONS) { + int err; + PyObject *ann_dict; + if (LOCALS() == NULL) { + _PyErr_Format(tstate, PyExc_SystemError, + "no locals found when setting up annotations"); + goto error; + } + /* check if __annotations__ in locals()... */ + if (PyDict_CheckExact(LOCALS())) { + ann_dict = _PyDict_GetItemWithError(LOCALS(), + &_Py_ID(__annotations__)); + if (ann_dict == NULL) { + if (_PyErr_Occurred(tstate)) { + goto error; + } + /* ...if not, create a new one */ + ann_dict = PyDict_New(); + if (ann_dict == NULL) { + goto error; + } + err = PyDict_SetItem(LOCALS(), &_Py_ID(__annotations__), + ann_dict); + Py_DECREF(ann_dict); + if (err != 0) { + goto error; + } + } + } + else { + /* do the same if locals() is not a dict */ + ann_dict = PyObject_GetItem(LOCALS(), &_Py_ID(__annotations__)); + if (ann_dict == NULL) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + goto error; + } + _PyErr_Clear(tstate); + ann_dict = PyDict_New(); + if (ann_dict == NULL) { + goto error; + } + err = PyObject_SetItem(LOCALS(), &_Py_ID(__annotations__), + ann_dict); + Py_DECREF(ann_dict); + if (err != 0) { + goto error; + } + } + else { + Py_DECREF(ann_dict); + } + } + } + + // stack effect: (__array[oparg] -- ) + inst(BUILD_CONST_KEY_MAP) { + PyObject *map; + PyObject *keys = TOP(); + if (!PyTuple_CheckExact(keys) || + PyTuple_GET_SIZE(keys) != (Py_ssize_t)oparg) { + _PyErr_SetString(tstate, PyExc_SystemError, + "bad BUILD_CONST_KEY_MAP keys argument"); + goto error; + } + map = _PyDict_FromItems( + &PyTuple_GET_ITEM(keys, 0), 1, + &PEEK(oparg + 1), 1, oparg); + if (map == NULL) { + goto error; + } + + Py_DECREF(POP()); + while (oparg--) { + Py_DECREF(POP()); + } + PUSH(map); + } + + // stack effect: (__0 -- ) + inst(DICT_UPDATE) { + PyObject *update = POP(); + PyObject *dict = PEEK(oparg); + if (PyDict_Update(dict, update) < 0) { + if (_PyErr_ExceptionMatches(tstate, PyExc_AttributeError)) { + _PyErr_Format(tstate, PyExc_TypeError, + "'%.200s' object is not a mapping", + Py_TYPE(update)->tp_name); + } + Py_DECREF(update); + goto error; + } + Py_DECREF(update); + } + + // stack effect: (__0 -- ) + inst(DICT_MERGE) { + PyObject *update = POP(); + PyObject *dict = PEEK(oparg); + + if (_PyDict_MergeEx(dict, update, 2) < 0) { + format_kwargs_error(tstate, PEEK(2 + oparg), update); + Py_DECREF(update); + goto error; + } + Py_DECREF(update); + PREDICT(CALL_FUNCTION_EX); + } + + // stack effect: (__0, __1 -- ) + inst(MAP_ADD) { + PyObject *value = TOP(); + PyObject *key = SECOND(); + PyObject *map; + STACK_SHRINK(2); + map = PEEK(oparg); /* dict */ + assert(PyDict_CheckExact(map)); + /* map[key] = value */ + if (_PyDict_SetItem_Take2((PyDictObject *)map, key, value) != 0) { + goto error; + } + PREDICT(JUMP_BACKWARD); 
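+        // MAP_ADD appears in dict-comprehension loop bodies, so the next
+        // instruction is very likely the loop's JUMP_BACKWARD.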
+ } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR) { + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *name = GETITEM(names, oparg>>1); + next_instr -= OPSIZE; + _Py_Specialize_LoadAttr(owner, next_instr, name); + DISPATCH_SAME_OPARG(); + } + STAT_INC(LOAD_ATTR, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + PyObject *name = GETITEM(names, oparg >> 1); + PyObject *owner = TOP(); + if (oparg & 1) { + /* Designed to work in tandem with CALL. */ + PyObject* meth = NULL; + + int meth_found = _PyObject_GetMethod(owner, name, &meth); + + if (meth == NULL) { + /* Most likely attribute wasn't found. */ + goto error; + } + + if (meth_found) { + /* We can bypass temporary bound method object. + meth is unbound method and obj is self. + + meth | self | arg1 | ... | argN + */ + SET_TOP(meth); + PUSH(owner); // self + } + else { + /* meth is not an unbound method (but a regular attr, or + something was returned by a descriptor protocol). Set + the second element of the stack to NULL, to signal + CALL that it's not a method call. + + NULL | meth | arg1 | ... | argN + */ + SET_TOP(NULL); + Py_DECREF(owner); + PUSH(meth); + } + } + else { + PyObject *res = PyObject_GetAttr(owner, name); + if (res == NULL) { + goto error; + } + Py_DECREF(owner); + SET_TOP(res); + } + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR_INSTANCE_VALUE) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *res; + PyTypeObject *tp = Py_TYPE(owner); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + uint32_t type_version = read_u32(cache->version); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); + assert(tp->tp_dictoffset < 0); + assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); + DEOPT_IF(!_PyDictOrValues_IsValues(dorv), LOAD_ATTR); + res = _PyDictOrValues_GetValues(dorv)->values[cache->index]; + DEOPT_IF(res == NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(res); + SET_TOP(NULL); + STACK_GROW((oparg & 1)); + SET_TOP(res); + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR_MODULE) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *res; + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + DEOPT_IF(!PyModule_CheckExact(owner), LOAD_ATTR); + PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner)->md_dict; + assert(dict != NULL); + DEOPT_IF(dict->ma_keys->dk_version != read_u32(cache->version), + LOAD_ATTR); + assert(dict->ma_keys->dk_kind == DICT_KEYS_UNICODE); + assert(cache->index < dict->ma_keys->dk_nentries); + PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + cache->index; + res = ep->me_value; + DEOPT_IF(res == NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(res); + SET_TOP(NULL); + STACK_GROW((oparg & 1)); + SET_TOP(res); + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR_WITH_HINT) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *res; + PyTypeObject *tp = Py_TYPE(owner); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + uint32_t type_version = read_u32(cache->version); + assert(type_version != 0); + 
DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); + assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); + DEOPT_IF(_PyDictOrValues_IsValues(dorv), LOAD_ATTR); + PyDictObject *dict = (PyDictObject *)_PyDictOrValues_GetDict(dorv); + DEOPT_IF(dict == NULL, LOAD_ATTR); + assert(PyDict_CheckExact((PyObject *)dict)); + PyObject *name = GETITEM(names, oparg>>1); + uint16_t hint = cache->index; + DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, LOAD_ATTR); + if (DK_IS_UNICODE(dict->ma_keys)) { + PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint; + DEOPT_IF(ep->me_key != name, LOAD_ATTR); + res = ep->me_value; + } + else { + PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint; + DEOPT_IF(ep->me_key != name, LOAD_ATTR); + res = ep->me_value; + } + DEOPT_IF(res == NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(res); + SET_TOP(NULL); + STACK_GROW((oparg & 1)); + SET_TOP(res); + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR_SLOT) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *res; + PyTypeObject *tp = Py_TYPE(owner); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + uint32_t type_version = read_u32(cache->version); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); + char *addr = (char *)owner + cache->index; + res = *(PyObject **)addr; + DEOPT_IF(res == NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(res); + SET_TOP(NULL); + STACK_GROW((oparg & 1)); + SET_TOP(res); + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR_CLASS) { + assert(cframe.use_tracing == 0); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + + PyObject *cls = TOP(); + DEOPT_IF(!PyType_Check(cls), LOAD_ATTR); + uint32_t type_version = read_u32(cache->type_version); + DEOPT_IF(((PyTypeObject *)cls)->tp_version_tag != type_version, + LOAD_ATTR); + assert(type_version != 0); + + STAT_INC(LOAD_ATTR, hit); + PyObject *res = read_obj(cache->descr); + assert(res != NULL); + Py_INCREF(res); + SET_TOP(NULL); + STACK_GROW((oparg & 1)); + SET_TOP(res); + Py_DECREF(cls); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR_PROPERTY) { + assert(cframe.use_tracing == 0); + DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + + PyObject *owner = TOP(); + PyTypeObject *cls = Py_TYPE(owner); + uint32_t type_version = read_u32(cache->type_version); + DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR); + assert(type_version != 0); + PyObject *fget = read_obj(cache->descr); + assert(Py_IS_TYPE(fget, &PyFunction_Type)); + PyFunctionObject *f = (PyFunctionObject *)fget; + uint32_t func_version = read_u32(cache->keys_version); + assert(func_version != 0); + DEOPT_IF(f->func_version != func_version, LOAD_ATTR); + PyCodeObject *code = (PyCodeObject *)f->func_code; + assert(code->co_argcount == 1); + DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(fget); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f); + SET_TOP(NULL); + int shrink_stack = !(oparg & 1); + STACK_SHRINK(shrink_stack); + new_frame->localsplus[0] = owner; + for (int i = 1; i < code->co_nlocalsplus; i++) { + 
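+            // Locals other than `self` start out unbound in the getter's frame.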
new_frame->localsplus[i] = NULL; + } + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH_INLINED(new_frame); + } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN) { + assert(cframe.use_tracing == 0); + DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + PyObject *owner = TOP(); + PyTypeObject *cls = Py_TYPE(owner); + uint32_t type_version = read_u32(cache->type_version); + DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR); + assert(type_version != 0); + PyObject *getattribute = read_obj(cache->descr); + assert(Py_IS_TYPE(getattribute, &PyFunction_Type)); + PyFunctionObject *f = (PyFunctionObject *)getattribute; + uint32_t func_version = read_u32(cache->keys_version); + assert(func_version != 0); + DEOPT_IF(f->func_version != func_version, LOAD_ATTR); + PyCodeObject *code = (PyCodeObject *)f->func_code; + assert(code->co_argcount == 2); + DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + + PyObject *name = GETITEM(names, oparg >> 1); + Py_INCREF(f); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f); + SET_TOP(NULL); + int shrink_stack = !(oparg & 1); + STACK_SHRINK(shrink_stack); + new_frame->localsplus[0] = owner; + new_frame->localsplus[1] = Py_NewRef(name); + for (int i = 2; i < code->co_nlocalsplus; i++) { + new_frame->localsplus[i] = NULL; + } + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH_INLINED(new_frame); + } + + inst(STORE_ATTR_INSTANCE_VALUE, (unused/1, type_version/2, index/1, value, owner --)) { + assert(cframe.use_tracing == 0); + PyTypeObject *tp = Py_TYPE(owner); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); + assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); + DEOPT_IF(!_PyDictOrValues_IsValues(dorv), STORE_ATTR); + STAT_INC(STORE_ATTR, hit); + PyDictValues *values = _PyDictOrValues_GetValues(dorv); + PyObject *old_value = values->values[index]; + values->values[index] = value; + if (old_value == NULL) { + _PyDictValues_AddToInsertionOrder(values, index); + } + else { + Py_DECREF(old_value); + } + Py_DECREF(owner); + } + + inst(STORE_ATTR_WITH_HINT, (unused/1, type_version/2, hint/1, value, owner --)) { + assert(cframe.use_tracing == 0); + PyTypeObject *tp = Py_TYPE(owner); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); + assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); + DEOPT_IF(_PyDictOrValues_IsValues(dorv), STORE_ATTR); + PyDictObject *dict = (PyDictObject *)_PyDictOrValues_GetDict(dorv); + DEOPT_IF(dict == NULL, STORE_ATTR); + assert(PyDict_CheckExact((PyObject *)dict)); + PyObject *name = GETITEM(names, oparg); + DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, STORE_ATTR); + PyObject *old_value; + uint64_t new_version; + if (DK_IS_UNICODE(dict->ma_keys)) { + PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint; + DEOPT_IF(ep->me_key != name, STORE_ATTR); + old_value = ep->me_value; + DEOPT_IF(old_value == NULL, STORE_ATTR); + new_version = _PyDict_NotifyEvent(PyDict_EVENT_MODIFIED, dict, name, value); + ep->me_value = value; + } + else { + PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint; + DEOPT_IF(ep->me_key != name, STORE_ATTR); + old_value = ep->me_value; + DEOPT_IF(old_value == NULL, STORE_ATTR); + new_version = 
_PyDict_NotifyEvent(PyDict_EVENT_MODIFIED, dict, name, value); + ep->me_value = value; + } + Py_DECREF(old_value); + STAT_INC(STORE_ATTR, hit); + /* Ensure dict is GC tracked if it needs to be */ + if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(value)) { + _PyObject_GC_TRACK(dict); + } + /* PEP 509 */ + dict->ma_version_tag = new_version; + Py_DECREF(owner); + } + + inst(STORE_ATTR_SLOT, (unused/1, type_version/2, index/1, value, owner --)) { + assert(cframe.use_tracing == 0); + PyTypeObject *tp = Py_TYPE(owner); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); + char *addr = (char *)owner + index; + STAT_INC(STORE_ATTR, hit); + PyObject *old_value = *(PyObject **)addr; + *(PyObject **)addr = value; + Py_XDECREF(old_value); + Py_DECREF(owner); + } + + family(compare_op) = { + COMPARE_OP, + _COMPARE_OP_FLOAT, + _COMPARE_OP_INT, + _COMPARE_OP_STR, + }; + + inst(COMPARE_OP, (unused/2, left, right -- res)) { + _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + next_instr -= OPSIZE; + _Py_Specialize_CompareOp(left, right, next_instr, oparg); + DISPATCH_SAME_OPARG(); + } + STAT_INC(COMPARE_OP, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + assert(oparg <= Py_GE); + res = PyObject_RichCompare(left, right, oparg); + Py_DECREF(left); + Py_DECREF(right); + ERROR_IF(res == NULL, error); + } + + // The result is an int disguised as an object pointer. + op(_COMPARE_OP_FLOAT, (unused/1, when_to_jump_mask/1, left, right -- jump: size_t)) { + assert(cframe.use_tracing == 0); + // Combined: COMPARE_OP (float ? float) + POP_JUMP_IF_(true/false) + DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP); + DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP); + double dleft = PyFloat_AS_DOUBLE(left); + double dright = PyFloat_AS_DOUBLE(right); + // 1 if <, 2 if ==, 4 if >; this matches when _to_jump_mask + int sign_ish = 2*(dleft > dright) + 2 - (dleft < dright); + DEOPT_IF(isnan(dleft), COMPARE_OP); + DEOPT_IF(isnan(dright), COMPARE_OP); + STAT_INC(COMPARE_OP, hit); + _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); + _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); + jump = sign_ish & when_to_jump_mask; + } + // The input is an int disguised as an object pointer! + op(_JUMP_IF, (jump: size_t --)) { + assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE); + if (jump) { + JUMPBY(oparg); + } + } + // We're praying that the compiler optimizes the flags manipuations. + super(COMPARE_OP_FLOAT_JUMP) = _COMPARE_OP_FLOAT + _JUMP_IF; + + // Similar to COMPARE_OP_FLOAT + op(_COMPARE_OP_INT, (unused/1, when_to_jump_mask/1, left, right -- jump: size_t)) { + assert(cframe.use_tracing == 0); + // Combined: COMPARE_OP (int ? 
int) + POP_JUMP_IF_(true/false) + DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP); + DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP); + DEOPT_IF((size_t)(Py_SIZE(left) + 1) > 2, COMPARE_OP); + DEOPT_IF((size_t)(Py_SIZE(right) + 1) > 2, COMPARE_OP); + STAT_INC(COMPARE_OP, hit); + assert(Py_ABS(Py_SIZE(left)) <= 1 && Py_ABS(Py_SIZE(right)) <= 1); + Py_ssize_t ileft = Py_SIZE(left) * ((PyLongObject *)left)->ob_digit[0]; + Py_ssize_t iright = Py_SIZE(right) * ((PyLongObject *)right)->ob_digit[0]; + // 1 if <, 2 if ==, 4 if >; this matches when _to_jump_mask + int sign_ish = 2*(ileft > iright) + 2 - (ileft < iright); + _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); + _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); + jump = sign_ish & when_to_jump_mask; + } + super(COMPARE_OP_INT_JUMP) = _COMPARE_OP_INT + _JUMP_IF; + + // Similar to COMPARE_OP_FLOAT, but for ==, != only + op(_COMPARE_OP_STR, (unused/1, invert/1, left, right -- jump: size_t)) { + assert(cframe.use_tracing == 0); + // Combined: COMPARE_OP (str == str or str != str) + POP_JUMP_IF_(true/false) + DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP); + DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP); + STAT_INC(COMPARE_OP, hit); + int res = _PyUnicode_Equal(left, right); + assert(oparg == Py_EQ || oparg == Py_NE); + _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc); + _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); + assert(res == 0 || res == 1); + assert(invert == 0 || invert == 1); + jump = res ^ invert; + } + super(COMPARE_OP_STR_JUMP) = _COMPARE_OP_STR + _JUMP_IF; + + // stack effect: (__0 -- ) + inst(IS_OP) { + PyObject *right = POP(); + PyObject *left = TOP(); + int res = Py_Is(left, right) ^ oparg; + PyObject *b = res ? Py_True : Py_False; + SET_TOP(Py_NewRef(b)); + Py_DECREF(left); + Py_DECREF(right); + } + + // stack effect: (__0 -- ) + inst(CONTAINS_OP) { + PyObject *right = POP(); + PyObject *left = POP(); + int res = PySequence_Contains(right, left); + Py_DECREF(left); + Py_DECREF(right); + if (res < 0) { + goto error; + } + PyObject *b = (res^oparg) ? Py_True : Py_False; + PUSH(Py_NewRef(b)); + } + + // stack effect: ( -- ) + inst(CHECK_EG_MATCH) { + PyObject *match_type = POP(); + if (check_except_star_type_valid(tstate, match_type) < 0) { + Py_DECREF(match_type); + goto error; + } + + PyObject *exc_value = TOP(); + PyObject *match = NULL, *rest = NULL; + int res = exception_group_match(exc_value, match_type, + &match, &rest); + Py_DECREF(match_type); + if (res < 0) { + goto error; + } + + if (match == NULL || rest == NULL) { + assert(match == NULL); + assert(rest == NULL); + goto error; + } + if (Py_IsNone(match)) { + PUSH(match); + Py_XDECREF(rest); + } + else { + /* Total or partial match - update the stack from + * [val] + * to + * [rest, match] + * (rest can be Py_None) + */ + + SET_TOP(rest); + PUSH(match); + PyErr_SetExcInfo(NULL, Py_NewRef(match), NULL); + Py_DECREF(exc_value); + } + } + + // stack effect: ( -- ) + inst(CHECK_EXC_MATCH) { + PyObject *right = POP(); + PyObject *left = TOP(); + assert(PyExceptionInstance_Check(left)); + if (check_except_type_valid(tstate, right) < 0) { + Py_DECREF(right); + goto error; + } + + int res = PyErr_GivenExceptionMatches(left, right); + Py_DECREF(right); + PUSH(Py_NewRef(res ? 
Py_True : Py_False)); + } + + // stack effect: (__0 -- ) + inst(IMPORT_NAME) { + PyObject *name = GETITEM(names, oparg); + PyObject *fromlist = POP(); + PyObject *level = TOP(); + PyObject *res; + res = import_name(tstate, frame, name, fromlist, level); + Py_DECREF(level); + Py_DECREF(fromlist); + SET_TOP(res); + if (res == NULL) + goto error; + } + + // stack effect: (__0 -- ) + inst(IMPORT_STAR) { + PyObject *from = POP(), *locals; + int err; + if (_PyFrame_FastToLocalsWithError(frame) < 0) { + Py_DECREF(from); + goto error; + } + + locals = LOCALS(); + if (locals == NULL) { + _PyErr_SetString(tstate, PyExc_SystemError, + "no locals found during 'import *'"); + Py_DECREF(from); + goto error; + } + err = import_all_from(tstate, locals, from); + _PyFrame_LocalsToFast(frame, 0); + Py_DECREF(from); + if (err != 0) + goto error; + } + + // stack effect: ( -- __0) + inst(IMPORT_FROM) { + PyObject *name = GETITEM(names, oparg); + PyObject *from = TOP(); + PyObject *res; + res = import_from(tstate, from, name); + PUSH(res); + if (res == NULL) + goto error; + } + + // stack effect: ( -- ) + inst(JUMP_FORWARD) { + JUMPBY(oparg); + } + + // stack effect: ( -- ) + inst(JUMP_BACKWARD) { + assert(oparg < INSTR_OFFSET()); + JUMPBY(-oparg); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0 -- ) + inst(POP_JUMP_IF_FALSE) { + PyObject *cond = POP(); + if (Py_IsTrue(cond)) { + _Py_DECREF_NO_DEALLOC(cond); + } + else if (Py_IsFalse(cond)) { + _Py_DECREF_NO_DEALLOC(cond); + JUMPBY(oparg); + } + else { + int err = PyObject_IsTrue(cond); + Py_DECREF(cond); + if (err > 0) + ; + else if (err == 0) { + JUMPBY(oparg); + } + else + goto error; + } + } + + // stack effect: (__0 -- ) + inst(POP_JUMP_IF_TRUE) { + PyObject *cond = POP(); + if (Py_IsFalse(cond)) { + _Py_DECREF_NO_DEALLOC(cond); + } + else if (Py_IsTrue(cond)) { + _Py_DECREF_NO_DEALLOC(cond); + JUMPBY(oparg); + } + else { + int err = PyObject_IsTrue(cond); + Py_DECREF(cond); + if (err > 0) { + JUMPBY(oparg); + } + else if (err == 0) + ; + else + goto error; + } + } + + // stack effect: (__0 -- ) + inst(POP_JUMP_IF_NOT_NONE) { + PyObject *value = POP(); + if (!Py_IsNone(value)) { + JUMPBY(oparg); + } + Py_DECREF(value); + } + + // stack effect: (__0 -- ) + inst(POP_JUMP_IF_NONE) { + PyObject *value = POP(); + if (Py_IsNone(value)) { + _Py_DECREF_NO_DEALLOC(value); + JUMPBY(oparg); + } + else { + Py_DECREF(value); + } + } + + // error: JUMP_IF_FALSE_OR_POP stack effect depends on jump flag + inst(JUMP_IF_FALSE_OR_POP) { + PyObject *cond = TOP(); + int err; + if (Py_IsTrue(cond)) { + STACK_SHRINK(1); + _Py_DECREF_NO_DEALLOC(cond); + } + else if (Py_IsFalse(cond)) { + JUMPBY(oparg); + } + else { + err = PyObject_IsTrue(cond); + if (err > 0) { + STACK_SHRINK(1); + Py_DECREF(cond); + } + else if (err == 0) { + JUMPBY(oparg); + } + else { + goto error; + } + } + } + + // error: JUMP_IF_TRUE_OR_POP stack effect depends on jump flag + inst(JUMP_IF_TRUE_OR_POP) { + PyObject *cond = TOP(); + int err; + if (Py_IsFalse(cond)) { + STACK_SHRINK(1); + _Py_DECREF_NO_DEALLOC(cond); + } + else if (Py_IsTrue(cond)) { + JUMPBY(oparg); + } + else { + err = PyObject_IsTrue(cond); + if (err > 0) { + JUMPBY(oparg); + } + else if (err == 0) { + STACK_SHRINK(1); + Py_DECREF(cond); + } + else { + goto error; + } + } + } + + // stack effect: ( -- ) + inst(JUMP_BACKWARD_NO_INTERRUPT) { + /* This bytecode is used in the `yield from` or `await` loop. 
+ * If there is an interrupt, we want it handled in the innermost + * generator or coroutine, so we deliberately do not check it here. + * (see bpo-30039). + */ + JUMPBY(-oparg); + } + + // stack effect: ( -- __0) + inst(GET_LEN) { + // PUSH(len(TOS)) + Py_ssize_t len_i = PyObject_Length(TOP()); + if (len_i < 0) { + goto error; + } + PyObject *len_o = PyLong_FromSsize_t(len_i); + if (len_o == NULL) { + goto error; + } + PUSH(len_o); + } + + // stack effect: (__0, __1 -- ) + inst(MATCH_CLASS) { + // Pop TOS and TOS1. Set TOS to a tuple of attributes on success, or + // None on failure. + PyObject *names = POP(); + PyObject *type = POP(); + PyObject *subject = TOP(); + assert(PyTuple_CheckExact(names)); + PyObject *attrs = match_class(tstate, subject, type, oparg, names); + Py_DECREF(names); + Py_DECREF(type); + if (attrs) { + // Success! + assert(PyTuple_CheckExact(attrs)); + SET_TOP(attrs); + } + else if (_PyErr_Occurred(tstate)) { + // Error! + goto error; + } + else { + // Failure! + SET_TOP(Py_NewRef(Py_None)); + } + Py_DECREF(subject); + } + + // stack effect: ( -- __0) + inst(MATCH_MAPPING) { + PyObject *subject = TOP(); + int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_MAPPING; + PyObject *res = match ? Py_True : Py_False; + PUSH(Py_NewRef(res)); + PREDICT(POP_JUMP_IF_FALSE); + } + + // stack effect: ( -- __0) + inst(MATCH_SEQUENCE) { + PyObject *subject = TOP(); + int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_SEQUENCE; + PyObject *res = match ? Py_True : Py_False; + PUSH(Py_NewRef(res)); + PREDICT(POP_JUMP_IF_FALSE); + } + + // stack effect: ( -- __0) + inst(MATCH_KEYS) { + // On successful match, PUSH(values). Otherwise, PUSH(None). + PyObject *keys = TOP(); + PyObject *subject = SECOND(); + PyObject *values_or_none = match_keys(tstate, subject, keys); + if (values_or_none == NULL) { + goto error; + } + PUSH(values_or_none); + } + + // stack effect: ( -- ) + inst(GET_ITER) { + /* before: [obj]; after [getiter(obj)] */ + PyObject *iterable = TOP(); + PyObject *iter = PyObject_GetIter(iterable); + Py_DECREF(iterable); + SET_TOP(iter); + if (iter == NULL) + goto error; + } + + // stack effect: ( -- ) + inst(GET_YIELD_FROM_ITER) { + /* before: [obj]; after [getiter(obj)] */ + PyObject *iterable = TOP(); + PyObject *iter; + if (PyCoro_CheckExact(iterable)) { + /* `iterable` is a coroutine */ + if (!(frame->f_code->co_flags & (CO_COROUTINE | CO_ITERABLE_COROUTINE))) { + /* and it is used in a 'yield from' expression of a + regular generator. */ + Py_DECREF(iterable); + SET_TOP(NULL); + _PyErr_SetString(tstate, PyExc_TypeError, + "cannot 'yield from' a coroutine object " + "in a non-coroutine generator"); + goto error; + } + } + else if (!PyGen_CheckExact(iterable)) { + /* `iterable` is not a generator. 
*/ + iter = PyObject_GetIter(iterable); + Py_DECREF(iterable); + SET_TOP(iter); + if (iter == NULL) + goto error; + } + PREDICT(LOAD_CONST); + } + + // stack effect: ( -- __0) + inst(FOR_ITER) { + _PyForIterCache *cache = (_PyForIterCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + next_instr -= OPSIZE; + _Py_Specialize_ForIter(TOP(), next_instr, oparg); + DISPATCH_SAME_OPARG(); + } + STAT_INC(FOR_ITER, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + /* before: [iter]; after: [iter, iter()] *or* [] */ + PyObject *iter = TOP(); + PyObject *next = (*Py_TYPE(iter)->tp_iternext)(iter); + if (next != NULL) { + PUSH(next); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER); + } + else { + if (_PyErr_Occurred(tstate)) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { + goto error; + } + else if (tstate->c_tracefunc != NULL) { + call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj, tstate, frame); + } + _PyErr_Clear(tstate); + } + /* iterator ended normally */ + assert(_Py_OPCODE(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg]) == END_FOR); + STACK_SHRINK(1); + Py_DECREF(iter); + /* Skip END_FOR */ + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + OPSIZE); + } + } + + // stack effect: ( -- __0) + inst(FOR_ITER_LIST) { + assert(cframe.use_tracing == 0); + _PyListIterObject *it = (_PyListIterObject *)TOP(); + DEOPT_IF(Py_TYPE(it) != &PyListIter_Type, FOR_ITER); + STAT_INC(FOR_ITER, hit); + PyListObject *seq = it->it_seq; + if (seq) { + if (it->it_index < PyList_GET_SIZE(seq)) { + PyObject *next = PyList_GET_ITEM(seq, it->it_index++); + PUSH(Py_NewRef(next)); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER); + goto end_for_iter_list; // End of this instruction + } + it->it_seq = NULL; + Py_DECREF(seq); + } + STACK_SHRINK(1); + Py_DECREF(it); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + OPSIZE); + end_for_iter_list: + } + + // stack effect: ( -- __0) + inst(FOR_ITER_TUPLE) { + assert(cframe.use_tracing == 0); + _PyTupleIterObject *it = (_PyTupleIterObject *)TOP(); + DEOPT_IF(Py_TYPE(it) != &PyTupleIter_Type, FOR_ITER); + STAT_INC(FOR_ITER, hit); + PyTupleObject *seq = it->it_seq; + if (seq) { + if (it->it_index < PyTuple_GET_SIZE(seq)) { + PyObject *next = PyTuple_GET_ITEM(seq, it->it_index++); + PUSH(Py_NewRef(next)); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER); + goto end_for_iter_tuple; // End of this instruction + } + it->it_seq = NULL; + Py_DECREF(seq); + } + STACK_SHRINK(1); + Py_DECREF(it); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + OPSIZE); + end_for_iter_tuple: + } + + // stack effect: ( -- __0) + inst(FOR_ITER_RANGE) { + assert(cframe.use_tracing == 0); + _PyRangeIterObject *r = (_PyRangeIterObject *)TOP(); + DEOPT_IF(Py_TYPE(r) != &PyRangeIter_Type, FOR_ITER); + STAT_INC(FOR_ITER, hit); + _Py_CODEUNIT next = next_instr[INLINE_CACHE_ENTRIES_FOR_ITER]; + assert(_PyOpcode_Deopt[_Py_OPCODE(next)] == STORE_FAST); + if (r->len <= 0) { + STACK_SHRINK(1); + Py_DECREF(r); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + OPSIZE); + } + else { + long value = r->start; + r->start = value + r->step; + r->len--; + if (_PyLong_AssignValue(&GETLOCAL(_Py_OPARG(next)), value) < 0) { + goto error; + } + // The STORE_FAST is already done. 
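/* Editorial aside, not part of the patch: the FOR_ITER_RANGE fast path above
 * only has to advance a cursor; no Python-level iterator protocol is involved.
 * A minimal stand-alone sketch of that cursor logic, using an invented
 * simplification of _PyRangeIterObject (illustration only, not CPython code): */

#include <stdio.h>

/* Simplified stand-in for the range iterator: start is the next value to
 * produce, step is the increment, len is the number of values left. */
typedef struct {
    long start;
    long step;
    long len;
} range_iter;

/* Mirrors the fast path: return 0 when exhausted, otherwise hand out the
 * next value and advance the cursor. */
static int range_iter_next(range_iter *r, long *out)
{
    if (r->len <= 0) {
        return 0;
    }
    long value = r->start;
    r->start = value + r->step;
    r->len--;
    *out = value;
    return 1;
}

int main(void)
{
    range_iter r = {0, 2, 5};   /* behaves like iter(range(0, 10, 2)) */
    long v;
    while (range_iter_next(&r, &v)) {
        printf("%ld\n", v);
    }
    return 0;
}

/* In the handler above the produced value has already been written into the
 * STORE_FAST target local, so the jump that follows skips the inline cache
 * entries and that trailing STORE_FAST as well. */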
+ JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + OPSIZE); + } + } + + inst(FOR_ITER_GEN) { + assert(cframe.use_tracing == 0); + PyGenObject *gen = (PyGenObject *)TOP(); + DEOPT_IF(Py_TYPE(gen) != &PyGen_Type, FOR_ITER); + DEOPT_IF(gen->gi_frame_state >= FRAME_EXECUTING, FOR_ITER); + STAT_INC(FOR_ITER, hit); + _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe; + frame->yield_offset = oparg; + _PyFrame_StackPush(gen_frame, Py_NewRef(Py_None)); + gen->gi_frame_state = FRAME_EXECUTING; + gen->gi_exc_state.previous_item = tstate->exc_info; + tstate->exc_info = &gen->gi_exc_state; + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg); + assert(_Py_OPCODE(*next_instr) == END_FOR); + DISPATCH_INLINED(gen_frame); + } + + // stack effect: ( -- __0) + inst(BEFORE_ASYNC_WITH) { + PyObject *mgr = TOP(); + PyObject *res; + PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__aenter__)); + if (enter == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_Format(tstate, PyExc_TypeError, + "'%.200s' object does not support the " + "asynchronous context manager protocol", + Py_TYPE(mgr)->tp_name); + } + goto error; + } + PyObject *exit = _PyObject_LookupSpecial(mgr, &_Py_ID(__aexit__)); + if (exit == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_Format(tstate, PyExc_TypeError, + "'%.200s' object does not support the " + "asynchronous context manager protocol " + "(missed __aexit__ method)", + Py_TYPE(mgr)->tp_name); + } + Py_DECREF(enter); + goto error; + } + SET_TOP(exit); + Py_DECREF(mgr); + res = _PyObject_CallNoArgs(enter); + Py_DECREF(enter); + if (res == NULL) + goto error; + PUSH(res); + PREDICT(GET_AWAITABLE); + } + + // stack effect: ( -- __0) + inst(BEFORE_WITH) { + PyObject *mgr = TOP(); + PyObject *res; + PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__enter__)); + if (enter == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_Format(tstate, PyExc_TypeError, + "'%.200s' object does not support the " + "context manager protocol", + Py_TYPE(mgr)->tp_name); + } + goto error; + } + PyObject *exit = _PyObject_LookupSpecial(mgr, &_Py_ID(__exit__)); + if (exit == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_Format(tstate, PyExc_TypeError, + "'%.200s' object does not support the " + "context manager protocol " + "(missed __exit__ method)", + Py_TYPE(mgr)->tp_name); + } + Py_DECREF(enter); + goto error; + } + SET_TOP(exit); + Py_DECREF(mgr); + res = _PyObject_CallNoArgs(enter); + Py_DECREF(enter); + if (res == NULL) { + goto error; + } + PUSH(res); + } + + inst(WITH_EXCEPT_START, (exit_func, lasti, unused, val -- exit_func, lasti, unused, val, res)) { + /* At the top of the stack are 4 values: + - val: TOP = exc_info() + - unused: SECOND = previous exception + - lasti: THIRD = lasti of exception in exc_info() + - exit_func: FOURTH = the context.__exit__ bound method + We call FOURTH(type(TOP), TOP, GetTraceback(TOP)). + Then we push the __exit__ return value. 
+ */ + PyObject *exc, *tb; + + assert(val && PyExceptionInstance_Check(val)); + exc = PyExceptionInstance_Class(val); + tb = PyException_GetTraceback(val); + Py_XDECREF(tb); + assert(PyLong_Check(lasti)); + (void)lasti; // Shut up compiler warning if asserts are off + PyObject *stack[4] = {NULL, exc, val, tb}; + res = PyObject_Vectorcall(exit_func, stack + 1, + 3 | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL); + ERROR_IF(res == NULL, error); + } + + // stack effect: ( -- __0) + inst(PUSH_EXC_INFO) { + PyObject *value = TOP(); + + _PyErr_StackItem *exc_info = tstate->exc_info; + if (exc_info->exc_value != NULL) { + SET_TOP(exc_info->exc_value); + } + else { + SET_TOP(Py_NewRef(Py_None)); + } + + PUSH(Py_NewRef(value)); + assert(PyExceptionInstance_Check(value)); + exc_info->exc_value = value; + + } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR_METHOD_WITH_VALUES) { + /* Cached method object */ + assert(cframe.use_tracing == 0); + PyObject *self = TOP(); + PyTypeObject *self_cls = Py_TYPE(self); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + uint32_t type_version = read_u32(cache->type_version); + assert(type_version != 0); + DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); + assert(self_cls->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(self); + DEOPT_IF(!_PyDictOrValues_IsValues(dorv), LOAD_ATTR); + PyHeapTypeObject *self_heap_type = (PyHeapTypeObject *)self_cls; + DEOPT_IF(self_heap_type->ht_cached_keys->dk_version != + read_u32(cache->keys_version), LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + PyObject *res = read_obj(cache->descr); + assert(res != NULL); + assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); + SET_TOP(Py_NewRef(res)); + PUSH(self); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR_METHOD_WITH_DICT) { + /* Can be either a managed dict, or a tp_dictoffset offset.*/ + assert(cframe.use_tracing == 0); + PyObject *self = TOP(); + PyTypeObject *self_cls = Py_TYPE(self); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + + DEOPT_IF(self_cls->tp_version_tag != read_u32(cache->type_version), + LOAD_ATTR); + /* Treat index as a signed 16 bit value */ + Py_ssize_t dictoffset = self_cls->tp_dictoffset; + assert(dictoffset > 0); + PyDictObject **dictptr = (PyDictObject**)(((char *)self)+dictoffset); + PyDictObject *dict = *dictptr; + DEOPT_IF(dict == NULL, LOAD_ATTR); + DEOPT_IF(dict->ma_keys->dk_version != read_u32(cache->keys_version), + LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + PyObject *res = read_obj(cache->descr); + assert(res != NULL); + assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); + SET_TOP(Py_NewRef(res)); + PUSH(self); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR_METHOD_NO_DICT) { + assert(cframe.use_tracing == 0); + PyObject *self = TOP(); + PyTypeObject *self_cls = Py_TYPE(self); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + uint32_t type_version = read_u32(cache->type_version); + DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); + assert(self_cls->tp_dictoffset == 0); + STAT_INC(LOAD_ATTR, hit); + PyObject *res = read_obj(cache->descr); + assert(res != NULL); + assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); + SET_TOP(Py_NewRef(res)); + PUSH(self); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + } + + // error: LOAD_ATTR has irregular 
stack effect + inst(LOAD_ATTR_METHOD_LAZY_DICT) { + assert(cframe.use_tracing == 0); + PyObject *self = TOP(); + PyTypeObject *self_cls = Py_TYPE(self); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + uint32_t type_version = read_u32(cache->type_version); + DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); + Py_ssize_t dictoffset = self_cls->tp_dictoffset; + assert(dictoffset > 0); + PyObject *dict = *(PyObject **)((char *)self + dictoffset); + /* This object has a __dict__, just not yet created */ + DEOPT_IF(dict != NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + PyObject *res = read_obj(cache->descr); + assert(res != NULL); + assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); + SET_TOP(Py_NewRef(res)); + PUSH(self); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_BOUND_METHOD_EXACT_ARGS) { + DEOPT_IF(is_method(stack_pointer, oparg), CALL); + PyObject *function = PEEK(oparg + 1); + DEOPT_IF(Py_TYPE(function) != &PyMethod_Type, CALL); + STAT_INC(CALL, hit); + PyObject *self = ((PyMethodObject *)function)->im_self; + PEEK(oparg + 1) = Py_NewRef(self); + PyObject *meth = ((PyMethodObject *)function)->im_func; + PEEK(oparg + 2) = Py_NewRef(meth); + Py_DECREF(function); + GO_TO_INSTRUCTION(CALL_PY_EXACT_ARGS); + } + + // stack effect: ( -- ) + inst(KW_NAMES) { + assert(kwnames == NULL); + assert(oparg < PyTuple_GET_SIZE(consts)); + kwnames = GETITEM(consts, oparg); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL) { + _PyCallCache *cache = (_PyCallCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + int is_meth = is_method(stack_pointer, oparg); + int nargs = oparg + is_meth; + PyObject *callable = PEEK(nargs + 1); + next_instr -= OPSIZE; + _Py_Specialize_Call(callable, next_instr, nargs, kwnames); + DISPATCH_SAME_OPARG(); + } + STAT_INC(CALL, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + int total_args, is_meth; + is_meth = is_method(stack_pointer, oparg); + PyObject *function = PEEK(oparg + 1); + if (!is_meth && Py_TYPE(function) == &PyMethod_Type) { + PyObject *self = ((PyMethodObject *)function)->im_self; + PEEK(oparg+1) = Py_NewRef(self); + PyObject *meth = ((PyMethodObject *)function)->im_func; + PEEK(oparg+2) = Py_NewRef(meth); + Py_DECREF(function); + is_meth = 1; + } + total_args = oparg + is_meth; + function = PEEK(total_args + 1); + int positional_args = total_args - KWNAMES_LEN(); + // Check if the call can be inlined or not + if (Py_TYPE(function) == &PyFunction_Type && + tstate->interp->eval_frame == NULL && + ((PyFunctionObject *)function)->vectorcall == _PyFunction_Vectorcall) + { + int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(function))->co_flags; + PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(function)); + STACK_SHRINK(total_args); + _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit( + tstate, (PyFunctionObject *)function, locals, + stack_pointer, positional_args, kwnames + ); + kwnames = NULL; + STACK_SHRINK(2-is_meth); + // The frame has stolen all the arguments from the stack, + // so there is no need to clean them up. 
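/* Editorial aside, not part of the patch: "stolen" here means ownership of
 * the argument pointers moves from the caller's operand stack into the new
 * frame's local slots, so no per-argument retain/release happens at the call
 * boundary. A small stand-alone sketch of that hand-off, with toy types
 * invented for the illustration (this is not the CPython frame API): */

#include <stdlib.h>
#include <string.h>

typedef struct { int refcnt; int value; } obj;      /* toy object */

typedef struct frame {
    struct frame *previous;
    obj *locals[8];             /* arguments land in the first local slots */
} frame;

/* Move nargs pointers from the operand stack into the callee's locals.
 * The caller must not release them afterwards; the frame now owns them. */
static frame *push_frame(frame *caller, obj **stack_top, int nargs)
{
    frame *f = calloc(1, sizeof(*f));
    if (f == NULL) {
        return NULL;
    }
    f->previous = caller;
    memcpy(f->locals, stack_top - nargs, (size_t)nargs * sizeof(obj *));
    return f;
}

int main(void)
{
    obj a = {1, 10}, b = {1, 20};
    obj *stack[4];
    obj **sp = stack;
    *sp++ = &a;                               /* caller pushes the arguments */
    *sp++ = &b;
    frame *callee = push_frame(NULL, sp, 2);  /* callee steals both pointers */
    sp -= 2;                                  /* caller only shrinks its stack */
    free(callee);
    return 0;
}

/* Back in the instruction above, the only case left to handle is the frame
 * push failing: */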
+ if (new_frame == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + DISPATCH_INLINED(new_frame); + } + /* Callable is not a normal Python function */ + PyObject *res; + if (cframe.use_tracing) { + res = trace_call_function( + tstate, function, stack_pointer-total_args, + positional_args, kwnames); + } + else { + res = PyObject_Vectorcall( + function, stack_pointer-total_args, + positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET, + kwnames); + } + kwnames = NULL; + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + Py_DECREF(function); + /* Clear the stack */ + STACK_SHRINK(total_args); + for (int i = 0; i < total_args; i++) { + Py_DECREF(stack_pointer[i]); + } + STACK_SHRINK(2-is_meth); + PUSH(res); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_PY_EXACT_ARGS) { + assert(kwnames == NULL); + DEOPT_IF(tstate->interp->eval_frame, CALL); + _PyCallCache *cache = (_PyCallCache *)next_instr; + int is_meth = is_method(stack_pointer, oparg); + int argcount = oparg + is_meth; + PyObject *callable = PEEK(argcount + 1); + DEOPT_IF(!PyFunction_Check(callable), CALL); + PyFunctionObject *func = (PyFunctionObject *)callable; + DEOPT_IF(func->func_version != read_u32(cache->func_version), CALL); + PyCodeObject *code = (PyCodeObject *)func->func_code; + DEOPT_IF(code->co_argcount != argcount, CALL); + DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), CALL); + STAT_INC(CALL, hit); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, func); + STACK_SHRINK(argcount); + for (int i = 0; i < argcount; i++) { + new_frame->localsplus[i] = stack_pointer[i]; + } + for (int i = argcount; i < code->co_nlocalsplus; i++) { + new_frame->localsplus[i] = NULL; + } + STACK_SHRINK(2-is_meth); + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + DISPATCH_INLINED(new_frame); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_PY_WITH_DEFAULTS) { + assert(kwnames == NULL); + DEOPT_IF(tstate->interp->eval_frame, CALL); + _PyCallCache *cache = (_PyCallCache *)next_instr; + int is_meth = is_method(stack_pointer, oparg); + int argcount = oparg + is_meth; + PyObject *callable = PEEK(argcount + 1); + DEOPT_IF(!PyFunction_Check(callable), CALL); + PyFunctionObject *func = (PyFunctionObject *)callable; + DEOPT_IF(func->func_version != read_u32(cache->func_version), CALL); + PyCodeObject *code = (PyCodeObject *)func->func_code; + DEOPT_IF(argcount > code->co_argcount, CALL); + int minargs = cache->min_args; + DEOPT_IF(argcount < minargs, CALL); + DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), CALL); + STAT_INC(CALL, hit); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, func); + STACK_SHRINK(argcount); + for (int i = 0; i < argcount; i++) { + new_frame->localsplus[i] = stack_pointer[i]; + } + for (int i = argcount; i < code->co_argcount; i++) { + PyObject *def = PyTuple_GET_ITEM(func->func_defaults, + i - minargs); + new_frame->localsplus[i] = Py_NewRef(def); + } + for (int i = code->co_argcount; i < code->co_nlocalsplus; i++) { + new_frame->localsplus[i] = NULL; + } + STACK_SHRINK(2-is_meth); + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + DISPATCH_INLINED(new_frame); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_TYPE_1) { + assert(kwnames == NULL); + assert(cframe.use_tracing == 0); + assert(oparg == 1); + DEOPT_IF(is_method(stack_pointer, 1), CALL); + PyObject *obj = TOP(); + PyObject *callable = SECOND(); + 
DEOPT_IF(callable != (PyObject *)&PyType_Type, CALL); + STAT_INC(CALL, hit); + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + PyObject *res = Py_NewRef(Py_TYPE(obj)); + Py_DECREF(callable); + Py_DECREF(obj); + STACK_SHRINK(2); + SET_TOP(res); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_STR_1) { + assert(kwnames == NULL); + assert(cframe.use_tracing == 0); + assert(oparg == 1); + DEOPT_IF(is_method(stack_pointer, 1), CALL); + PyObject *callable = PEEK(2); + DEOPT_IF(callable != (PyObject *)&PyUnicode_Type, CALL); + STAT_INC(CALL, hit); + PyObject *arg = TOP(); + PyObject *res = PyObject_Str(arg); + Py_DECREF(arg); + Py_DECREF(&PyUnicode_Type); + STACK_SHRINK(2); + SET_TOP(res); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_TUPLE_1) { + assert(kwnames == NULL); + assert(oparg == 1); + DEOPT_IF(is_method(stack_pointer, 1), CALL); + PyObject *callable = PEEK(2); + DEOPT_IF(callable != (PyObject *)&PyTuple_Type, CALL); + STAT_INC(CALL, hit); + PyObject *arg = TOP(); + PyObject *res = PySequence_Tuple(arg); + Py_DECREF(arg); + Py_DECREF(&PyTuple_Type); + STACK_SHRINK(2); + SET_TOP(res); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_BUILTIN_CLASS) { + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + int kwnames_len = KWNAMES_LEN(); + PyObject *callable = PEEK(total_args + 1); + DEOPT_IF(!PyType_Check(callable), CALL); + PyTypeObject *tp = (PyTypeObject *)callable; + DEOPT_IF(tp->tp_vectorcall == NULL, CALL); + STAT_INC(CALL, hit); + STACK_SHRINK(total_args); + PyObject *res = tp->tp_vectorcall((PyObject *)tp, stack_pointer, + total_args-kwnames_len, kwnames); + kwnames = NULL; + /* Free the arguments. */ + for (int i = 0; i < total_args; i++) { + Py_DECREF(stack_pointer[i]); + } + Py_DECREF(tp); + STACK_SHRINK(1-is_meth); + SET_TOP(res); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_BUILTIN_O) { + assert(cframe.use_tracing == 0); + /* Builtin METH_O functions */ + assert(kwnames == NULL); + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + DEOPT_IF(total_args != 1, CALL); + PyObject *callable = PEEK(total_args + 1); + DEOPT_IF(!PyCFunction_CheckExact(callable), CALL); + DEOPT_IF(PyCFunction_GET_FLAGS(callable) != METH_O, CALL); + STAT_INC(CALL, hit); + PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable); + // This is slower but CPython promises to check all non-vectorcall + // function calls. 
+ if (_Py_EnterRecursiveCallTstate(tstate, " while calling a Python object")) { + goto error; + } + PyObject *arg = TOP(); + PyObject *res = _PyCFunction_TrampolineCall(cfunc, PyCFunction_GET_SELF(callable), arg); + _Py_LeaveRecursiveCallTstate(tstate); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + + Py_DECREF(arg); + Py_DECREF(callable); + STACK_SHRINK(2-is_meth); + SET_TOP(res); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_BUILTIN_FAST) { + assert(cframe.use_tracing == 0); + /* Builtin METH_FASTCALL functions, without keywords */ + assert(kwnames == NULL); + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyObject *callable = PEEK(total_args + 1); + DEOPT_IF(!PyCFunction_CheckExact(callable), CALL); + DEOPT_IF(PyCFunction_GET_FLAGS(callable) != METH_FASTCALL, + CALL); + STAT_INC(CALL, hit); + PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable); + STACK_SHRINK(total_args); + /* res = func(self, args, nargs) */ + PyObject *res = ((_PyCFunctionFast)(void(*)(void))cfunc)( + PyCFunction_GET_SELF(callable), + stack_pointer, + total_args); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + + /* Free the arguments. */ + for (int i = 0; i < total_args; i++) { + Py_DECREF(stack_pointer[i]); + } + STACK_SHRINK(2-is_meth); + PUSH(res); + Py_DECREF(callable); + if (res == NULL) { + /* Not deopting because this doesn't mean our optimization was + wrong. `res` can be NULL for valid reasons. Eg. getattr(x, + 'invalid'). In those cases an exception is set, so we must + handle it. + */ + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_BUILTIN_FAST_WITH_KEYWORDS) { + assert(cframe.use_tracing == 0); + /* Builtin METH_FASTCALL | METH_KEYWORDS functions */ + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyObject *callable = PEEK(total_args + 1); + DEOPT_IF(!PyCFunction_CheckExact(callable), CALL); + DEOPT_IF(PyCFunction_GET_FLAGS(callable) != + (METH_FASTCALL | METH_KEYWORDS), CALL); + STAT_INC(CALL, hit); + STACK_SHRINK(total_args); + /* res = func(self, args, nargs, kwnames) */ + _PyCFunctionFastWithKeywords cfunc = + (_PyCFunctionFastWithKeywords)(void(*)(void)) + PyCFunction_GET_FUNCTION(callable); + PyObject *res = cfunc( + PyCFunction_GET_SELF(callable), + stack_pointer, + total_args - KWNAMES_LEN(), + kwnames + ); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + kwnames = NULL; + + /* Free the arguments. 
*/ + for (int i = 0; i < total_args; i++) { + Py_DECREF(stack_pointer[i]); + } + STACK_SHRINK(2-is_meth); + PUSH(res); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_LEN) { + assert(cframe.use_tracing == 0); + assert(kwnames == NULL); + /* len(o) */ + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + DEOPT_IF(total_args != 1, CALL); + PyObject *callable = PEEK(total_args + 1); + PyInterpreterState *interp = _PyInterpreterState_GET(); + DEOPT_IF(callable != interp->callable_cache.len, CALL); + STAT_INC(CALL, hit); + PyObject *arg = TOP(); + Py_ssize_t len_i = PyObject_Length(arg); + if (len_i < 0) { + goto error; + } + PyObject *res = PyLong_FromSsize_t(len_i); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + + STACK_SHRINK(2-is_meth); + SET_TOP(res); + Py_DECREF(callable); + Py_DECREF(arg); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_ISINSTANCE) { + assert(cframe.use_tracing == 0); + assert(kwnames == NULL); + /* isinstance(o, o2) */ + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyObject *callable = PEEK(total_args + 1); + DEOPT_IF(total_args != 2, CALL); + PyInterpreterState *interp = _PyInterpreterState_GET(); + DEOPT_IF(callable != interp->callable_cache.isinstance, CALL); + STAT_INC(CALL, hit); + PyObject *cls = POP(); + PyObject *inst = TOP(); + int retval = PyObject_IsInstance(inst, cls); + if (retval < 0) { + Py_DECREF(cls); + goto error; + } + PyObject *res = PyBool_FromLong(retval); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + + STACK_SHRINK(2-is_meth); + SET_TOP(res); + Py_DECREF(inst); + Py_DECREF(cls); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_LIST_APPEND) { + assert(cframe.use_tracing == 0); + assert(kwnames == NULL); + assert(oparg == 1); + PyObject *callable = PEEK(3); + PyInterpreterState *interp = _PyInterpreterState_GET(); + DEOPT_IF(callable != interp->callable_cache.list_append, CALL); + PyObject *list = SECOND(); + DEOPT_IF(!PyList_Check(list), CALL); + STAT_INC(CALL, hit); + PyObject *arg = POP(); + if (_PyList_AppendTakeRef((PyListObject *)list, arg) < 0) { + goto error; + } + STACK_SHRINK(2); + Py_DECREF(list); + Py_DECREF(callable); + // CALL + POP_TOP + JUMPBY(INLINE_CACHE_ENTRIES_CALL + 1); + assert(_Py_OPCODE(next_instr[-1]) == POP_TOP); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_METHOD_DESCRIPTOR_O) { + assert(kwnames == NULL); + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyMethodDescrObject *callable = + (PyMethodDescrObject *)PEEK(total_args + 1); + DEOPT_IF(total_args != 2, CALL); + DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = callable->d_method; + DEOPT_IF(meth->ml_flags != METH_O, CALL); + PyObject *arg = TOP(); + PyObject *self = SECOND(); + DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); + STAT_INC(CALL, hit); + PyCFunction cfunc = meth->ml_meth; + // This is slower but CPython promises to check all non-vectorcall + // function calls. 
+ if (_Py_EnterRecursiveCallTstate(tstate, " while calling a Python object")) { + goto error; + } + PyObject *res = _PyCFunction_TrampolineCall(cfunc, self, arg); + _Py_LeaveRecursiveCallTstate(tstate); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + Py_DECREF(self); + Py_DECREF(arg); + STACK_SHRINK(oparg + 1); + SET_TOP(res); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS) { + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyMethodDescrObject *callable = + (PyMethodDescrObject *)PEEK(total_args + 1); + DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = callable->d_method; + DEOPT_IF(meth->ml_flags != (METH_FASTCALL|METH_KEYWORDS), CALL); + PyTypeObject *d_type = callable->d_common.d_type; + PyObject *self = PEEK(total_args); + DEOPT_IF(!Py_IS_TYPE(self, d_type), CALL); + STAT_INC(CALL, hit); + int nargs = total_args-1; + STACK_SHRINK(nargs); + _PyCFunctionFastWithKeywords cfunc = + (_PyCFunctionFastWithKeywords)(void(*)(void))meth->ml_meth; + PyObject *res = cfunc(self, stack_pointer, nargs - KWNAMES_LEN(), + kwnames); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + kwnames = NULL; + + /* Free the arguments. */ + for (int i = 0; i < nargs; i++) { + Py_DECREF(stack_pointer[i]); + } + Py_DECREF(self); + STACK_SHRINK(2-is_meth); + SET_TOP(res); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS) { + assert(kwnames == NULL); + assert(oparg == 0 || oparg == 1); + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + DEOPT_IF(total_args != 1, CALL); + PyMethodDescrObject *callable = (PyMethodDescrObject *)SECOND(); + DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = callable->d_method; + PyObject *self = TOP(); + DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); + DEOPT_IF(meth->ml_flags != METH_NOARGS, CALL); + STAT_INC(CALL, hit); + PyCFunction cfunc = meth->ml_meth; + // This is slower but CPython promises to check all non-vectorcall + // function calls. 
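/* Editorial aside, not part of the patch: ml_flags selects among several C
 * calling conventions (METH_NOARGS, METH_O, METH_FASTCALL, ...), and each
 * specialized CALL variant handles exactly one of them, which is why the
 * DEOPT_IF checks above compare the flags before anything else. A toy
 * flag-driven dispatcher with invented types, just to illustrate the shape: */

#include <stdio.h>

typedef struct { int value; } obj;               /* toy object */
typedef obj *(*meth_o)(obj *self, obj *arg);
typedef obj *(*meth_fastcall)(obj *self, obj *const *args, int nargs);

enum { FLAG_O = 1, FLAG_FASTCALL = 2 };

typedef struct {
    int flags;
    void (*impl)(void);                          /* cast back per convention */
} toy_method;

static obj result;

static obj *add_one(obj *self, obj *arg)         /* a METH_O-style method */
{
    (void)self;
    result.value = arg->value + 1;
    return &result;
}

/* Pick the convention from the flags; NULL means "wrong argument count",
 * the situation the specialized instructions deopt on up front. */
static obj *toy_call(toy_method *m, obj *self, obj *const *args, int nargs)
{
    switch (m->flags) {
    case FLAG_O:
        return nargs == 1 ? ((meth_o)m->impl)(self, args[0]) : NULL;
    case FLAG_FASTCALL:
        return ((meth_fastcall)m->impl)(self, args, nargs);
    default:
        return NULL;
    }
}

int main(void)
{
    toy_method m = {FLAG_O, (void (*)(void))add_one};
    obj self = {0}, arg = {41};
    obj *args[] = {&arg};
    obj *res = toy_call(&m, &self, args, 1);
    printf("%d\n", res ? res->value : -1);       /* prints 42 */
    return 0;
}

/* The recursion guard entered just below is the explicit per-call check
 * referred to in the comment above. */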
+ if (_Py_EnterRecursiveCallTstate(tstate, " while calling a Python object")) { + goto error; + } + PyObject *res = _PyCFunction_TrampolineCall(cfunc, self, NULL); + _Py_LeaveRecursiveCallTstate(tstate); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + Py_DECREF(self); + STACK_SHRINK(oparg + 1); + SET_TOP(res); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_METHOD_DESCRIPTOR_FAST) { + assert(kwnames == NULL); + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyMethodDescrObject *callable = + (PyMethodDescrObject *)PEEK(total_args + 1); + /* Builtin METH_FASTCALL methods, without keywords */ + DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = callable->d_method; + DEOPT_IF(meth->ml_flags != METH_FASTCALL, CALL); + PyObject *self = PEEK(total_args); + DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); + STAT_INC(CALL, hit); + _PyCFunctionFast cfunc = + (_PyCFunctionFast)(void(*)(void))meth->ml_meth; + int nargs = total_args-1; + STACK_SHRINK(nargs); + PyObject *res = cfunc(self, stack_pointer, nargs); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + /* Clear the stack of the arguments. */ + for (int i = 0; i < nargs; i++) { + Py_DECREF(stack_pointer[i]); + } + Py_DECREF(self); + STACK_SHRINK(2-is_meth); + SET_TOP(res); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // error: CALL_FUNCTION_EX has irregular stack effect + inst(CALL_FUNCTION_EX) { + PyObject *func, *callargs, *kwargs = NULL, *result; + if (oparg & 0x01) { + kwargs = POP(); + // DICT_MERGE is called before this opcode if there are kwargs. + // It converts all dict subtypes in kwargs into regular dicts. 
+ assert(PyDict_CheckExact(kwargs)); + } + callargs = POP(); + func = TOP(); + if (!PyTuple_CheckExact(callargs)) { + if (check_args_iterable(tstate, func, callargs) < 0) { + Py_DECREF(callargs); + goto error; + } + Py_SETREF(callargs, PySequence_Tuple(callargs)); + if (callargs == NULL) { + goto error; + } + } + assert(PyTuple_CheckExact(callargs)); + + result = do_call_core(tstate, func, callargs, kwargs, cframe.use_tracing); + Py_DECREF(func); + Py_DECREF(callargs); + Py_XDECREF(kwargs); + + STACK_SHRINK(1); + assert(TOP() == NULL); + SET_TOP(result); + if (result == NULL) { + goto error; + } + CHECK_EVAL_BREAKER(); + } + + // error: MAKE_FUNCTION has irregular stack effect + inst(MAKE_FUNCTION) { + PyObject *codeobj = POP(); + PyFunctionObject *func = (PyFunctionObject *) + PyFunction_New(codeobj, GLOBALS()); + + Py_DECREF(codeobj); + if (func == NULL) { + goto error; + } + + if (oparg & 0x08) { + assert(PyTuple_CheckExact(TOP())); + func->func_closure = POP(); + } + if (oparg & 0x04) { + assert(PyTuple_CheckExact(TOP())); + func->func_annotations = POP(); + } + if (oparg & 0x02) { + assert(PyDict_CheckExact(TOP())); + func->func_kwdefaults = POP(); + } + if (oparg & 0x01) { + assert(PyTuple_CheckExact(TOP())); + func->func_defaults = POP(); + } + + func->func_version = ((PyCodeObject *)codeobj)->co_version; + PUSH((PyObject *)func); + } + + // stack effect: ( -- ) + inst(RETURN_GENERATOR) { + assert(PyFunction_Check(frame->f_funcobj)); + PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj; + PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func); + if (gen == NULL) { + goto error; + } + assert(EMPTY()); + _PyFrame_SetStackPointer(frame, stack_pointer); + _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe; + _PyFrame_Copy(frame, gen_frame); + assert(frame->frame_obj == NULL); + gen->gi_frame_state = FRAME_CREATED; + gen_frame->owner = FRAME_OWNED_BY_GENERATOR; + _Py_LeaveRecursiveCallPy(tstate); + assert(frame != &entry_frame); + _PyInterpreterFrame *prev = frame->previous; + _PyThreadState_PopFrame(tstate, frame); + frame = cframe.current_frame = prev; + _PyFrame_StackPush(frame, (PyObject *)gen); + goto resume_frame; + } + + // error: BUILD_SLICE has irregular stack effect + inst(BUILD_SLICE) { + PyObject *start, *stop, *step, *slice; + if (oparg == 3) + step = POP(); + else + step = NULL; + stop = POP(); + start = TOP(); + slice = PySlice_New(start, stop, step); + Py_DECREF(start); + Py_DECREF(stop); + Py_XDECREF(step); + SET_TOP(slice); + if (slice == NULL) + goto error; + } + + // error: FORMAT_VALUE has irregular stack effect + inst(FORMAT_VALUE) { + /* Handles f-string value formatting. */ + PyObject *result; + PyObject *fmt_spec; + PyObject *value; + PyObject *(*conv_fn)(PyObject *); + int which_conversion = oparg & FVC_MASK; + int have_fmt_spec = (oparg & FVS_MASK) == FVS_HAVE_SPEC; + + fmt_spec = have_fmt_spec ? POP() : NULL; + value = POP(); + + /* See if any conversion is specified. */ + switch (which_conversion) { + case FVC_NONE: conv_fn = NULL; break; + case FVC_STR: conv_fn = PyObject_Str; break; + case FVC_REPR: conv_fn = PyObject_Repr; break; + case FVC_ASCII: conv_fn = PyObject_ASCII; break; + default: + _PyErr_Format(tstate, PyExc_SystemError, + "unexpected conversion flag %d", + which_conversion); + goto error; + } + + /* If there's a conversion function, call it and replace + value with that result. Otherwise, just use value, + without conversion. 
*/ + if (conv_fn != NULL) { + result = conv_fn(value); + Py_DECREF(value); + if (result == NULL) { + Py_XDECREF(fmt_spec); + goto error; + } + value = result; + } + + /* If value is a unicode object, and there's no fmt_spec, + then we know the result of format(value) is value + itself. In that case, skip calling format(). I plan to + move this optimization in to PyObject_Format() + itself. */ + if (PyUnicode_CheckExact(value) && fmt_spec == NULL) { + /* Do nothing, just transfer ownership to result. */ + result = value; + } else { + /* Actually call format(). */ + result = PyObject_Format(value, fmt_spec); + Py_DECREF(value); + Py_XDECREF(fmt_spec); + if (result == NULL) { + goto error; + } + } + + PUSH(result); + } + + // stack effect: ( -- __0) + inst(COPY) { + assert(oparg != 0); + PyObject *peek = PEEK(oparg); + PUSH(Py_NewRef(peek)); + } + + inst(BINARY_OP, (unused/1, lhs, rhs -- res)) { + _PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + next_instr -= OPSIZE; + _Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, &GETLOCAL(0)); + DISPATCH_SAME_OPARG(); + } + STAT_INC(BINARY_OP, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + assert(0 <= oparg); + assert((unsigned)oparg < Py_ARRAY_LENGTH(binary_ops)); + assert(binary_ops[oparg]); + res = binary_ops[oparg](lhs, rhs); + Py_DECREF(lhs); + Py_DECREF(rhs); + ERROR_IF(res == NULL, error); + } + + // stack effect: ( -- ) + inst(SWAP) { + assert(oparg != 0); + PyObject *top = TOP(); + SET_TOP(PEEK(oparg)); + PEEK(oparg) = top; + } + + // stack effect: ( -- ) + inst(EXTENDED_ARG) { + assert(oparg); + assert(cframe.use_tracing == 0); + opcode = _Py_OPCODE(*next_instr); + oparg = oparg << 8 | _Py_OPARG(*next_instr); + PRE_DISPATCH_GOTO(); + DISPATCH_GOTO(); + } + + // stack effect: ( -- ) + inst(CACHE) { + Py_UNREACHABLE(); + } + + +// END BYTECODES // + + } + error: + exception_unwind: + handle_eval_breaker: + resume_frame: + resume_with_error: + start_frame: + unbound_local_error: + ; +} + +// Future families go below this point // + +family(call) = { + CALL, CALL_PY_EXACT_ARGS, + CALL_PY_WITH_DEFAULTS, CALL_BOUND_METHOD_EXACT_ARGS, CALL_BUILTIN_CLASS, + CALL_BUILTIN_FAST_WITH_KEYWORDS, CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS, CALL_NO_KW_BUILTIN_FAST, + CALL_NO_KW_BUILTIN_O, CALL_NO_KW_ISINSTANCE, CALL_NO_KW_LEN, + CALL_NO_KW_LIST_APPEND, CALL_NO_KW_METHOD_DESCRIPTOR_FAST, CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS, + CALL_NO_KW_METHOD_DESCRIPTOR_O, CALL_NO_KW_STR_1, CALL_NO_KW_TUPLE_1, + CALL_NO_KW_TYPE_1 }; +family(for_iter) = { + FOR_ITER, FOR_ITER_LIST, + FOR_ITER_RANGE }; +family(load_attr) = { + LOAD_ATTR, LOAD_ATTR_CLASS, + LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN, LOAD_ATTR_INSTANCE_VALUE, LOAD_ATTR_MODULE, + LOAD_ATTR_PROPERTY, LOAD_ATTR_SLOT, LOAD_ATTR_WITH_HINT, + LOAD_ATTR_METHOD_LAZY_DICT, LOAD_ATTR_METHOD_NO_DICT, LOAD_ATTR_METHOD_WITH_DICT, + LOAD_ATTR_METHOD_WITH_VALUES }; +family(load_const) = { LOAD_CONST, LOAD_CONST__LOAD_FAST }; +family(load_fast) = { LOAD_FAST, LOAD_FAST__LOAD_CONST, LOAD_FAST__LOAD_FAST }; +family(load_global) = { + LOAD_GLOBAL, LOAD_GLOBAL_BUILTIN, + LOAD_GLOBAL_MODULE }; +family(store_fast) = { STORE_FAST, STORE_FAST__LOAD_FAST, STORE_FAST__STORE_FAST }; +family(unpack_sequence) = { + UNPACK_SEQUENCE, UNPACK_SEQUENCE_LIST, + UNPACK_SEQUENCE_TUPLE, UNPACK_SEQUENCE_TWO_TUPLE }; diff --git a/Python/ceval.c b/Python/ceval.c index 100aa3d727e468..1ebe96798d3008 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ 
-143,7 +143,7 @@ lltrace_instruction(_PyInterpreterFrame *frame, const char *opname = _PyOpcode_OpName[opcode]; assert(opname != NULL); int offset = (int)(next_instr - _PyCode_CODE(frame->f_code)); - if (HAS_ARG(opcode)) { + if (HAS_ARG((int)_PyOpcode_Deopt[opcode])) { printf("%d: %s %d\n", offset * 2, opname, oparg); } else { @@ -155,7 +155,10 @@ static void lltrace_resume_frame(_PyInterpreterFrame *frame) { PyObject *fobj = frame->f_funcobj; - if (fobj == NULL || !PyFunction_Check(fobj)) { + if (frame->owner == FRAME_OWNED_BY_CSTACK || + fobj == NULL || + !PyFunction_Check(fobj) + ) { printf("\nResuming frame."); return; } @@ -391,8 +394,7 @@ match_keys(PyThreadState *tstate, PyObject *map, PyObject *keys) Py_DECREF(value); Py_DECREF(values); // Return None: - Py_INCREF(Py_None); - values = Py_None; + values = Py_NewRef(Py_None); goto done; } PyTuple_SET_ITEM(values, i, value); @@ -666,21 +668,26 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) #ifdef Py_STATS #define INSTRUCTION_START(op) \ do { \ - frame->prev_instr = next_instr++; \ + frame->prev_instr = next_instr; \ + next_instr += OPSIZE; \ OPCODE_EXE_INC(op); \ if (_py_stats) _py_stats->opcode_stats[lastopcode].pair_count[op]++; \ lastopcode = op; \ } while (0) #else -#define INSTRUCTION_START(op) (frame->prev_instr = next_instr++) +#define INSTRUCTION_START(op) \ + do { \ + frame->prev_instr = next_instr; \ + next_instr += OPSIZE; \ + } while (0) #endif #if USE_COMPUTED_GOTOS -#define TARGET(op) TARGET_##op: INSTRUCTION_START(op); -#define DISPATCH_GOTO() goto *opcode_targets[opcode] +# define TARGET(op) TARGET_##op: INSTRUCTION_START(op); +# define DISPATCH_GOTO() goto *opcode_targets[opcode] #else -#define TARGET(op) case op: INSTRUCTION_START(op); -#define DISPATCH_GOTO() goto dispatch_opcode +# define TARGET(op) case op: TARGET_##op: INSTRUCTION_START(op); +# define DISPATCH_GOTO() goto dispatch_opcode #endif /* PRE_DISPATCH_GOTO() does lltrace if enabled. Normally a no-op */ @@ -710,6 +717,16 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) DISPATCH_GOTO(); \ } +#define DISPATCH_INLINED(NEW_FRAME) \ + do { \ + _PyFrame_SetStackPointer(frame, stack_pointer); \ + frame->prev_instr = next_instr - OPSIZE; \ + (NEW_FRAME)->previous = frame; \ + frame = cframe.current_frame = (NEW_FRAME); \ + CALL_STAT_INC(inlined_py_calls); \ + goto start_frame; \ + } while (0) + #define CHECK_EVAL_BREAKER() \ _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); \ if (_Py_atomic_load_relaxed_int32(eval_breaker)) { \ @@ -733,22 +750,23 @@ GETITEM(PyObject *v, Py_ssize_t i) { /* Code access macros */ +#define VERBOSE 0 + /* The integer overflow is checked by an assertion below. */ -#define INSTR_OFFSET() ((int)(next_instr - first_instr)) +#define INSTR_OFFSET() ((int)(next_instr - _PyCode_CODE(frame->f_code))) #define NEXTOPARG() do { \ _Py_CODEUNIT word = *next_instr; \ opcode = _Py_OPCODE(word); \ - oparg = _Py_OPARG(word); \ + oparg1 = oparg = _Py_OPARG(word); \ + if (VERBOSE) fprintf(stderr, "[%d] next_instr = %p opcode = %d\n", __LINE__, next_instr, opcode); \ + word = *(next_instr +1); \ + oparg2 = _Py_OPARG2(word); \ + oparg3 = _Py_OPARG3(word); \ + if (VERBOSE) fprintf(stderr, "%d (%d, %d, %d)\n", opcode, oparg, oparg2, oparg3); \ } while (0) -#define JUMPTO(x) (next_instr = first_instr + (x)) +#define JUMPTO(x) (next_instr = _PyCode_CODE(frame->f_code) + (x)) #define JUMPBY(x) (next_instr += (x)) -/* Get opcode and oparg from original instructions, not quickened form. 
*/ -#define TRACING_NEXTOPARG() do { \ - NEXTOPARG(); \ - opcode = _PyOpcode_Deopt[opcode]; \ - } while (0) - /* OpCode prediction macros Some opcodes tend to come in pairs thus making it possible to predict the second code when the first is run. For example, @@ -806,12 +824,16 @@ GETITEM(PyObject *v, Py_ssize_t i) { #define THIRD() (stack_pointer[-3]) #define FOURTH() (stack_pointer[-4]) #define PEEK(n) (stack_pointer[-(n)]) +#define POKE(n, v) (stack_pointer[-(n)] = (v)) #define SET_TOP(v) (stack_pointer[-1] = (v)) #define SET_SECOND(v) (stack_pointer[-2] = (v)) #define BASIC_STACKADJ(n) (stack_pointer += n) #define BASIC_PUSH(v) (*stack_pointer++ = (v)) #define BASIC_POP() (*--stack_pointer) +#define REG(n) (frame->localsplus[n]) + + #ifdef Py_DEBUG #define PUSH(v) do { \ BASIC_PUSH(v); \ @@ -851,8 +873,31 @@ GETITEM(PyObject *v, Py_ssize_t i) { #define GO_TO_INSTRUCTION(op) goto PREDICT_ID(op) +#ifdef Py_STATS +#define UPDATE_MISS_STATS(INSTNAME) \ + do { \ + STAT_INC(opcode, miss); \ + STAT_INC((INSTNAME), miss); \ + /* The counter is always the first cache entry: */ \ + if (ADAPTIVE_COUNTER_IS_ZERO(next_instr->cache)) { \ + STAT_INC((INSTNAME), deopt); \ + } \ + else { \ + /* This is about to be (incorrectly) incremented: */ \ + STAT_DEC((INSTNAME), deferred); \ + } \ + } while (0) +#else +#define UPDATE_MISS_STATS(INSTNAME) ((void)0) +#endif -#define DEOPT_IF(cond, instname) if (cond) { goto miss; } +#define DEOPT_IF(COND, INSTNAME) \ + if ((COND)) { \ + /* This is only a single jump on release builds! */ \ + UPDATE_MISS_STATS((INSTNAME)); \ + assert(_PyOpcode_Deopt[opcode] == (INSTNAME)); \ + GO_TO_INSTRUCTION(INSTNAME); \ + } #define GLOBALS() frame->f_globals @@ -904,11 +949,23 @@ GETITEM(PyObject *v, Py_ssize_t i) { dtrace_function_entry(frame); \ } -#define ADAPTIVE_COUNTER_IS_ZERO(cache) \ - (cache)->counter < (1<<ADAPTIVE_BACKOFF_BITS) +#define ADAPTIVE_COUNTER_IS_ZERO(COUNTER) \ + (((COUNTER) >> ADAPTIVE_BACKOFF_BITS) == 0) + +#define ADAPTIVE_COUNTER_IS_MAX(COUNTER) \ + (((COUNTER) >> ADAPTIVE_BACKOFF_BITS) == ((1 << MAX_BACKOFF_VALUE) - 1)) -#define DECREMENT_ADAPTIVE_COUNTER(cache) \ - (cache)->counter -= (1<<ADAPTIVE_BACKOFF_BITS) +#define DECREMENT_ADAPTIVE_COUNTER(COUNTER) \ + do { \ + assert(!ADAPTIVE_COUNTER_IS_ZERO((COUNTER))); \ + (COUNTER) -= (1 << ADAPTIVE_BACKOFF_BITS); \ + } while (0); + +#define INCREMENT_ADAPTIVE_COUNTER(COUNTER) \ + do { \ + assert(!ADAPTIVE_COUNTER_IS_MAX((COUNTER))); \ + (COUNTER) += (1 << ADAPTIVE_BACKOFF_BITS); \ + } while (0); static int trace_function_entry(PyThreadState *tstate, _PyInterpreterFrame *frame) @@ -955,4071 +1012,233 @@ trace_function_exit(PyThreadState *tstate, _PyInterpreterFrame *frame, PyObject if (tstate->c_tracefunc) { if (call_trace_protected(tstate->c_tracefunc, tstate->c_traceobj, tstate, frame, PyTrace_RETURN, retval)) { - return -1; - } - } - if (tstate->c_profilefunc) { - if (call_trace_protected(tstate->c_profilefunc, tstate->c_profileobj, - tstate, frame, PyTrace_RETURN, retval)) { - return -1; - } - } - return 0; -} - -static _PyInterpreterFrame * -pop_frame(PyThreadState *tstate, _PyInterpreterFrame *frame) -{ - _PyInterpreterFrame *prev_frame = frame->previous; - _PyEvalFrameClearAndPop(tstate, frame); - return prev_frame; -} - - -int _Py_CheckRecursiveCallPy( - PyThreadState *tstate) -{ - if (tstate->recursion_headroom) { - if (tstate->py_recursion_remaining < -50) { - /* Overflowing while handling an overflow. Give up. 
*/ - Py_FatalError("Cannot recover from Python stack overflow."); - } - } - else { - if (tstate->py_recursion_remaining <= 0) { - tstate->recursion_headroom++; - _PyErr_Format(tstate, PyExc_RecursionError, - "maximum recursion depth exceeded"); - tstate->recursion_headroom--; - return -1; - } - } - return 0; -} - -static inline int _Py_EnterRecursivePy(PyThreadState *tstate) { - return (tstate->py_recursion_remaining-- <= 0) && - _Py_CheckRecursiveCallPy(tstate); -} - - -static inline void _Py_LeaveRecursiveCallPy(PyThreadState *tstate) { - tstate->py_recursion_remaining++; -} - - -/* It is only between the KW_NAMES instruction and the following CALL, - * that this has any meaning. - */ -typedef struct { - PyObject *kwnames; -} CallShape; - -// GH-89279: Must be a macro to be sure it's inlined by MSVC. -#define is_method(stack_pointer, args) (PEEK((args)+2) != NULL) - -#define KWNAMES_LEN() \ - (call_shape.kwnames == NULL ? 0 : ((int)PyTuple_GET_SIZE(call_shape.kwnames))) - -PyObject* _Py_HOT_FUNCTION -_PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int throwflag) -{ - _Py_EnsureTstateNotNULL(tstate); - CALL_STAT_INC(pyeval_calls); - -#if USE_COMPUTED_GOTOS -/* Import the static jump table */ -#include "opcode_targets.h" -#endif - -#ifdef Py_STATS - int lastopcode = 0; -#endif - // opcode is an 8-bit value to improve the code generated by MSVC - // for the big switch below (in combination with the EXTRA_CASES macro). - uint8_t opcode; /* Current opcode */ - int oparg; /* Current opcode argument, if any */ - _Py_atomic_int * const eval_breaker = &tstate->interp->ceval.eval_breaker; -#ifdef LLTRACE - int lltrace = 0; -#endif - - _PyCFrame cframe; - CallShape call_shape; - call_shape.kwnames = NULL; // Borrowed reference. Reset by CALL instructions. - - /* WARNING: Because the _PyCFrame lives on the C stack, - * but can be accessed from a heap allocated object (tstate) - * strict stack discipline must be maintained. - */ - _PyCFrame *prev_cframe = tstate->cframe; - cframe.use_tracing = prev_cframe->use_tracing; - cframe.previous = prev_cframe; - tstate->cframe = &cframe; - - frame->is_entry = true; - /* Push frame */ - frame->previous = prev_cframe->current_frame; - cframe.current_frame = frame; - - if (_Py_EnterRecursiveCallTstate(tstate, "")) { - tstate->c_recursion_remaining--; - tstate->py_recursion_remaining--; - goto exit_unwind; - } - - /* support for generator.throw() */ - if (throwflag) { - if (_Py_EnterRecursivePy(tstate)) { - goto exit_unwind; - } - TRACE_FUNCTION_THROW_ENTRY(); - DTRACE_FUNCTION_ENTRY(); - goto resume_with_error; - } - - /* Local "register" variables. - * These are cached values from the frame and code object. */ - - PyObject *names; - PyObject *consts; - _Py_CODEUNIT *first_instr; - _Py_CODEUNIT *next_instr; - PyObject **stack_pointer; - -/* Sets the above local variables from the frame */ -#define SET_LOCALS_FROM_FRAME() \ - { \ - PyCodeObject *co = frame->f_code; \ - names = co->co_names; \ - consts = co->co_consts; \ - first_instr = _PyCode_CODE(co); \ - } \ - assert(_PyInterpreterFrame_LASTI(frame) >= -1); \ - /* Jump back to the last instruction executed... */ \ - next_instr = frame->prev_instr + 1; \ - stack_pointer = _PyFrame_GetStackPointer(frame); \ - /* Set stackdepth to -1. \ - Update when returning or calling trace function. \ - Having stackdepth <= 0 ensures that invalid \ - values are not visible to the cycle GC. \ - We choose -1 rather than 0 to assist debugging. 
\ - */ \ - frame->stacktop = -1; - - -start_frame: - if (_Py_EnterRecursivePy(tstate)) { - goto exit_unwind; - } - -resume_frame: - SET_LOCALS_FROM_FRAME(); - -#ifdef LLTRACE - { - int r = PyDict_Contains(GLOBALS(), &_Py_ID(__lltrace__)); - if (r < 0) { - goto exit_unwind; - } - lltrace = r; - } - if (lltrace) { - lltrace_resume_frame(frame); - } -#endif - -#ifdef Py_DEBUG - /* _PyEval_EvalFrameDefault() must not be called with an exception set, - because it can clear it (directly or indirectly) and so the - caller loses its exception */ - assert(!_PyErr_Occurred(tstate)); -#endif - - DISPATCH(); - -handle_eval_breaker: - - /* Do periodic things, like check for signals and async I/0. - * We need to do reasonably frequently, but not too frequently. - * All loops should include a check of the eval breaker. - * We also check on return from any builtin function. - */ - if (_Py_HandlePending(tstate) != 0) { - goto error; - } - DISPATCH(); - - { - /* Start instructions */ -#if !USE_COMPUTED_GOTOS - dispatch_opcode: - switch (opcode) -#endif - { - - /* BEWARE! - It is essential that any operation that fails must goto error - and that all operation that succeed call DISPATCH() ! */ - - TARGET(NOP) { - DISPATCH(); - } - - TARGET(RESUME) { - _PyCode_Warmup(frame->f_code); - GO_TO_INSTRUCTION(RESUME_QUICK); - } - - TARGET(RESUME_QUICK) { - PREDICTED(RESUME_QUICK); - assert(tstate->cframe == &cframe); - assert(frame == cframe.current_frame); - if (_Py_atomic_load_relaxed_int32(eval_breaker) && oparg < 2) { - goto handle_eval_breaker; - } - DISPATCH(); - } - - TARGET(LOAD_CLOSURE) { - /* We keep LOAD_CLOSURE so that the bytecode stays more readable. */ - PyObject *value = GETLOCAL(oparg); - if (value == NULL) { - goto unbound_local_error; - } - Py_INCREF(value); - PUSH(value); - DISPATCH(); - } - - TARGET(LOAD_FAST_CHECK) { - PyObject *value = GETLOCAL(oparg); - if (value == NULL) { - goto unbound_local_error; - } - Py_INCREF(value); - PUSH(value); - DISPATCH(); - } - - TARGET(LOAD_FAST) { - PyObject *value = GETLOCAL(oparg); - assert(value != NULL); - Py_INCREF(value); - PUSH(value); - DISPATCH(); - } - - TARGET(LOAD_CONST) { - PREDICTED(LOAD_CONST); - PyObject *value = GETITEM(consts, oparg); - Py_INCREF(value); - PUSH(value); - DISPATCH(); - } - - TARGET(STORE_FAST) { - PyObject *value = POP(); - SETLOCAL(oparg, value); - DISPATCH(); - } - - TARGET(LOAD_FAST__LOAD_FAST) { - PyObject *value = GETLOCAL(oparg); - assert(value != NULL); - NEXTOPARG(); - next_instr++; - Py_INCREF(value); - PUSH(value); - value = GETLOCAL(oparg); - assert(value != NULL); - Py_INCREF(value); - PUSH(value); - DISPATCH(); - } - - TARGET(LOAD_FAST__LOAD_CONST) { - PyObject *value = GETLOCAL(oparg); - assert(value != NULL); - NEXTOPARG(); - next_instr++; - Py_INCREF(value); - PUSH(value); - value = GETITEM(consts, oparg); - Py_INCREF(value); - PUSH(value); - DISPATCH(); - } - - TARGET(STORE_FAST__LOAD_FAST) { - PyObject *value = POP(); - SETLOCAL(oparg, value); - NEXTOPARG(); - next_instr++; - value = GETLOCAL(oparg); - assert(value != NULL); - Py_INCREF(value); - PUSH(value); - DISPATCH(); - } - - TARGET(STORE_FAST__STORE_FAST) { - PyObject *value = POP(); - SETLOCAL(oparg, value); - NEXTOPARG(); - next_instr++; - value = POP(); - SETLOCAL(oparg, value); - DISPATCH(); - } - - TARGET(LOAD_CONST__LOAD_FAST) { - PyObject *value = GETITEM(consts, oparg); - NEXTOPARG(); - next_instr++; - Py_INCREF(value); - PUSH(value); - value = GETLOCAL(oparg); - assert(value != NULL); - Py_INCREF(value); - PUSH(value); - DISPATCH(); - } - - 
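/* Editorial aside, not part of the patch: the LOAD_FAST__LOAD_FAST family of
 * handlers above are superinstructions, where two adjacent operations run
 * under a single dispatch and the handler fetches the second operand itself.
 * A toy switch-based sketch of the idea (invented opcodes, not CPython's): */

#include <stdio.h>

enum { OP_LOAD_A, OP_LOAD_B, OP_LOAD_A__LOAD_B, OP_ADD, OP_PRINT, OP_HALT };

int main(void)
{
    /* Program using the fused opcode: pushes a and b, adds them, prints 5. */
    int program[] = {OP_LOAD_A__LOAD_B, OP_ADD, OP_PRINT, OP_HALT};
    int a = 2, b = 3, stack[8], *sp = stack;
    for (int *ip = program; ; ip++) {
        switch (*ip) {
        case OP_LOAD_A:
            *sp++ = a;
            break;
        case OP_LOAD_B:
            *sp++ = b;
            break;
        case OP_LOAD_A__LOAD_B:              /* one dispatch, two pushes */
            *sp++ = a;
            *sp++ = b;
            break;
        case OP_ADD:
            sp--;
            sp[-1] = sp[-1] + sp[0];
            break;
        case OP_PRINT:
            printf("%d\n", *--sp);
            break;
        case OP_HALT:
            return 0;
        }
    }
}

/* Saving one dispatch per fused pair is the whole point; the handlers being
 * removed from this switch do the same with real locals and constants. */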
TARGET(POP_TOP) { - PyObject *value = POP(); - Py_DECREF(value); - DISPATCH(); - } - - TARGET(PUSH_NULL) { - /* Use BASIC_PUSH as NULL is not a valid object pointer */ - BASIC_PUSH(NULL); - DISPATCH(); - } - - TARGET(END_FOR) { - PyObject *value = POP(); - Py_DECREF(value); - value = POP(); - Py_DECREF(value); - DISPATCH(); - } - - TARGET(UNARY_POSITIVE) { - PyObject *value = TOP(); - PyObject *res = PyNumber_Positive(value); - Py_DECREF(value); - SET_TOP(res); - if (res == NULL) - goto error; - DISPATCH(); - } - - TARGET(UNARY_NEGATIVE) { - PyObject *value = TOP(); - PyObject *res = PyNumber_Negative(value); - Py_DECREF(value); - SET_TOP(res); - if (res == NULL) - goto error; - DISPATCH(); - } - - TARGET(UNARY_NOT) { - PyObject *value = TOP(); - int err = PyObject_IsTrue(value); - Py_DECREF(value); - if (err == 0) { - Py_INCREF(Py_True); - SET_TOP(Py_True); - DISPATCH(); - } - else if (err > 0) { - Py_INCREF(Py_False); - SET_TOP(Py_False); - DISPATCH(); - } - STACK_SHRINK(1); - goto error; - } - - TARGET(UNARY_INVERT) { - PyObject *value = TOP(); - PyObject *res = PyNumber_Invert(value); - Py_DECREF(value); - SET_TOP(res); - if (res == NULL) - goto error; - DISPATCH(); - } - - TARGET(BINARY_OP_MULTIPLY_INT) { - assert(cframe.use_tracing == 0); - PyObject *left = SECOND(); - PyObject *right = TOP(); - DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); - DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); - STAT_INC(BINARY_OP, hit); - PyObject *prod = _PyLong_Multiply((PyLongObject *)left, (PyLongObject *)right); - SET_SECOND(prod); - _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); - _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); - STACK_SHRINK(1); - if (prod == NULL) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP); - DISPATCH(); - } - - TARGET(BINARY_OP_MULTIPLY_FLOAT) { - assert(cframe.use_tracing == 0); - PyObject *left = SECOND(); - PyObject *right = TOP(); - DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); - DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); - STAT_INC(BINARY_OP, hit); - double dprod = ((PyFloatObject *)left)->ob_fval * - ((PyFloatObject *)right)->ob_fval; - PyObject *prod = PyFloat_FromDouble(dprod); - SET_SECOND(prod); - _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); - _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); - STACK_SHRINK(1); - if (prod == NULL) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP); - DISPATCH(); - } - - TARGET(BINARY_OP_SUBTRACT_INT) { - assert(cframe.use_tracing == 0); - PyObject *left = SECOND(); - PyObject *right = TOP(); - DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); - DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); - STAT_INC(BINARY_OP, hit); - PyObject *sub = _PyLong_Subtract((PyLongObject *)left, (PyLongObject *)right); - SET_SECOND(sub); - _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); - _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); - STACK_SHRINK(1); - if (sub == NULL) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP); - DISPATCH(); - } - - TARGET(BINARY_OP_SUBTRACT_FLOAT) { - assert(cframe.use_tracing == 0); - PyObject *left = SECOND(); - PyObject *right = TOP(); - DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); - DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); - STAT_INC(BINARY_OP, hit); - double dsub = ((PyFloatObject *)left)->ob_fval - ((PyFloatObject *)right)->ob_fval; - PyObject *sub = PyFloat_FromDouble(dsub); - SET_SECOND(sub); - _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); - _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); - 
STACK_SHRINK(1); - if (sub == NULL) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP); - DISPATCH(); - } - - TARGET(BINARY_OP_ADD_UNICODE) { - assert(cframe.use_tracing == 0); - PyObject *left = SECOND(); - PyObject *right = TOP(); - DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); - DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); - STAT_INC(BINARY_OP, hit); - PyObject *res = PyUnicode_Concat(left, right); - STACK_SHRINK(1); - SET_TOP(res); - _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc); - _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); - if (TOP() == NULL) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP); - DISPATCH(); - } - - TARGET(BINARY_OP_INPLACE_ADD_UNICODE) { - assert(cframe.use_tracing == 0); - PyObject *left = SECOND(); - PyObject *right = TOP(); - DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); - DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); - _Py_CODEUNIT true_next = next_instr[INLINE_CACHE_ENTRIES_BINARY_OP]; - assert(_Py_OPCODE(true_next) == STORE_FAST || - _Py_OPCODE(true_next) == STORE_FAST__LOAD_FAST); - PyObject **target_local = &GETLOCAL(_Py_OPARG(true_next)); - DEOPT_IF(*target_local != left, BINARY_OP); - STAT_INC(BINARY_OP, hit); - /* Handle `left = left + right` or `left += right` for str. - * - * When possible, extend `left` in place rather than - * allocating a new PyUnicodeObject. This attempts to avoid - * quadratic behavior when one neglects to use str.join(). - * - * If `left` has only two references remaining (one from - * the stack, one in the locals), DECREFing `left` leaves - * only the locals reference, so PyUnicode_Append knows - * that the string is safe to mutate. - */ - assert(Py_REFCNT(left) >= 2); - _Py_DECREF_NO_DEALLOC(left); - STACK_SHRINK(2); - PyUnicode_Append(target_local, right); - _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); - if (*target_local == NULL) { - goto error; - } - // The STORE_FAST is already done. 
- JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP + 1); - DISPATCH(); - } - - TARGET(BINARY_OP_ADD_FLOAT) { - assert(cframe.use_tracing == 0); - PyObject *left = SECOND(); - PyObject *right = TOP(); - DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); - DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); - STAT_INC(BINARY_OP, hit); - double dsum = ((PyFloatObject *)left)->ob_fval + - ((PyFloatObject *)right)->ob_fval; - PyObject *sum = PyFloat_FromDouble(dsum); - SET_SECOND(sum); - _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); - _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); - STACK_SHRINK(1); - if (sum == NULL) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP); - DISPATCH(); - } - - TARGET(BINARY_OP_ADD_INT) { - assert(cframe.use_tracing == 0); - PyObject *left = SECOND(); - PyObject *right = TOP(); - DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); - DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); - STAT_INC(BINARY_OP, hit); - PyObject *sum = _PyLong_Add((PyLongObject *)left, (PyLongObject *)right); - SET_SECOND(sum); - _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); - _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); - STACK_SHRINK(1); - if (sum == NULL) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP); - DISPATCH(); - } - - TARGET(BINARY_SUBSCR) { - PREDICTED(BINARY_SUBSCR); - PyObject *sub = POP(); - PyObject *container = TOP(); - PyObject *res = PyObject_GetItem(container, sub); - Py_DECREF(container); - Py_DECREF(sub); - SET_TOP(res); - if (res == NULL) - goto error; - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_SUBSCR); - DISPATCH(); - } - - TARGET(BINARY_SLICE) { - PyObject *stop = POP(); - PyObject *start = POP(); - PyObject *container = TOP(); - - PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); - if (slice == NULL) { - goto error; - } - PyObject *res = PyObject_GetItem(container, slice); - Py_DECREF(slice); - if (res == NULL) { - goto error; - } - SET_TOP(res); - Py_DECREF(container); - DISPATCH(); - } - - TARGET(STORE_SLICE) { - PyObject *stop = POP(); - PyObject *start = POP(); - PyObject *container = TOP(); - PyObject *v = SECOND(); - - PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); - if (slice == NULL) { - goto error; - } - int err = PyObject_SetItem(container, slice, v); - Py_DECREF(slice); - if (err) { - goto error; - } - STACK_SHRINK(2); - Py_DECREF(v); - Py_DECREF(container); - DISPATCH(); - } - - TARGET(BINARY_SUBSCR_ADAPTIVE) { - _PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache)) { - PyObject *sub = TOP(); - PyObject *container = SECOND(); - next_instr--; - if (_Py_Specialize_BinarySubscr(container, sub, next_instr) < 0) { - goto error; - } - DISPATCH_SAME_OPARG(); - } - else { - STAT_INC(BINARY_SUBSCR, deferred); - DECREMENT_ADAPTIVE_COUNTER(cache); - GO_TO_INSTRUCTION(BINARY_SUBSCR); - } - } - - TARGET(BINARY_SUBSCR_LIST_INT) { - assert(cframe.use_tracing == 0); - PyObject *sub = TOP(); - PyObject *list = SECOND(); - DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); - DEOPT_IF(!PyList_CheckExact(list), BINARY_SUBSCR); - - // Deopt unless 0 <= sub < PyList_Size(list) - Py_ssize_t signed_magnitude = Py_SIZE(sub); - DEOPT_IF(((size_t)signed_magnitude) > 1, BINARY_SUBSCR); - assert(((PyLongObject *)_PyLong_GetZero())->ob_digit[0] == 0); - Py_ssize_t index = ((PyLongObject*)sub)->ob_digit[0]; - DEOPT_IF(index >= PyList_GET_SIZE(list), BINARY_SUBSCR); - STAT_INC(BINARY_SUBSCR, hit); - PyObject *res = PyList_GET_ITEM(list, index); - assert(res != NULL); - 
Py_INCREF(res); - STACK_SHRINK(1); - _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); - SET_TOP(res); - Py_DECREF(list); - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_SUBSCR); - DISPATCH(); - } - - TARGET(BINARY_SUBSCR_TUPLE_INT) { - assert(cframe.use_tracing == 0); - PyObject *sub = TOP(); - PyObject *tuple = SECOND(); - DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); - DEOPT_IF(!PyTuple_CheckExact(tuple), BINARY_SUBSCR); - - // Deopt unless 0 <= sub < PyTuple_Size(list) - Py_ssize_t signed_magnitude = Py_SIZE(sub); - DEOPT_IF(((size_t)signed_magnitude) > 1, BINARY_SUBSCR); - assert(((PyLongObject *)_PyLong_GetZero())->ob_digit[0] == 0); - Py_ssize_t index = ((PyLongObject*)sub)->ob_digit[0]; - DEOPT_IF(index >= PyTuple_GET_SIZE(tuple), BINARY_SUBSCR); - STAT_INC(BINARY_SUBSCR, hit); - PyObject *res = PyTuple_GET_ITEM(tuple, index); - assert(res != NULL); - Py_INCREF(res); - STACK_SHRINK(1); - _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); - SET_TOP(res); - Py_DECREF(tuple); - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_SUBSCR); - DISPATCH(); - } - - TARGET(BINARY_SUBSCR_DICT) { - assert(cframe.use_tracing == 0); - PyObject *dict = SECOND(); - DEOPT_IF(!PyDict_CheckExact(SECOND()), BINARY_SUBSCR); - STAT_INC(BINARY_SUBSCR, hit); - PyObject *sub = TOP(); - PyObject *res = PyDict_GetItemWithError(dict, sub); - if (res == NULL) { - if (!_PyErr_Occurred(tstate)) { - _PyErr_SetKeyError(sub); - } - goto error; - } - Py_INCREF(res); - STACK_SHRINK(1); - Py_DECREF(sub); - SET_TOP(res); - Py_DECREF(dict); - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_SUBSCR); - DISPATCH(); - } - - TARGET(BINARY_SUBSCR_GETITEM) { - PyObject *sub = TOP(); - PyObject *container = SECOND(); - _PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)next_instr; - uint32_t type_version = read_u32(cache->type_version); - PyTypeObject *tp = Py_TYPE(container); - DEOPT_IF(tp->tp_version_tag != type_version, BINARY_SUBSCR); - assert(tp->tp_flags & Py_TPFLAGS_HEAPTYPE); - PyObject *cached = ((PyHeapTypeObject *)tp)->_spec_cache.getitem; - assert(PyFunction_Check(cached)); - PyFunctionObject *getitem = (PyFunctionObject *)cached; - DEOPT_IF(getitem->func_version != cache->func_version, BINARY_SUBSCR); - PyCodeObject *code = (PyCodeObject *)getitem->func_code; - assert(code->co_argcount == 2); - DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), BINARY_SUBSCR); - STAT_INC(BINARY_SUBSCR, hit); - Py_INCREF(getitem); - _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, getitem); - STACK_SHRINK(2); - new_frame->localsplus[0] = container; - new_frame->localsplus[1] = sub; - for (int i = 2; i < code->co_nlocalsplus; i++) { - new_frame->localsplus[i] = NULL; - } - _PyFrame_SetStackPointer(frame, stack_pointer); - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_SUBSCR); - frame->prev_instr = next_instr - 1; - new_frame->previous = frame; - frame = cframe.current_frame = new_frame; - CALL_STAT_INC(inlined_py_calls); - goto start_frame; - } - - TARGET(LIST_APPEND) { - PyObject *v = POP(); - PyObject *list = PEEK(oparg); - if (_PyList_AppendTakeRef((PyListObject *)list, v) < 0) - goto error; - PREDICT(JUMP_BACKWARD_QUICK); - DISPATCH(); - } - - TARGET(SET_ADD) { - PyObject *v = POP(); - PyObject *set = PEEK(oparg); - int err; - err = PySet_Add(set, v); - Py_DECREF(v); - if (err != 0) - goto error; - PREDICT(JUMP_BACKWARD_QUICK); - DISPATCH(); - } - - TARGET(STORE_SUBSCR) { - PREDICTED(STORE_SUBSCR); - PyObject *sub = TOP(); - PyObject *container = SECOND(); - PyObject *v = THIRD(); - int err; - STACK_SHRINK(3); - /* container[sub] = 
v */ - err = PyObject_SetItem(container, sub, v); - Py_DECREF(v); - Py_DECREF(container); - Py_DECREF(sub); - if (err != 0) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR); - DISPATCH(); - } - - TARGET(STORE_SUBSCR_ADAPTIVE) { - _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache)) { - PyObject *sub = TOP(); - PyObject *container = SECOND(); - next_instr--; - if (_Py_Specialize_StoreSubscr(container, sub, next_instr) < 0) { - goto error; - } - DISPATCH_SAME_OPARG(); - } - else { - STAT_INC(STORE_SUBSCR, deferred); - DECREMENT_ADAPTIVE_COUNTER(cache); - GO_TO_INSTRUCTION(STORE_SUBSCR); - } - } - - TARGET(STORE_SUBSCR_LIST_INT) { - assert(cframe.use_tracing == 0); - PyObject *sub = TOP(); - PyObject *list = SECOND(); - PyObject *value = THIRD(); - DEOPT_IF(!PyLong_CheckExact(sub), STORE_SUBSCR); - DEOPT_IF(!PyList_CheckExact(list), STORE_SUBSCR); - - // Ensure nonnegative, zero-or-one-digit ints. - DEOPT_IF(((size_t)Py_SIZE(sub)) > 1, STORE_SUBSCR); - Py_ssize_t index = ((PyLongObject*)sub)->ob_digit[0]; - // Ensure index < len(list) - DEOPT_IF(index >= PyList_GET_SIZE(list), STORE_SUBSCR); - STAT_INC(STORE_SUBSCR, hit); - - PyObject *old_value = PyList_GET_ITEM(list, index); - PyList_SET_ITEM(list, index, value); - STACK_SHRINK(3); - assert(old_value != NULL); - Py_DECREF(old_value); - _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); - Py_DECREF(list); - JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR); - DISPATCH(); - } - - TARGET(STORE_SUBSCR_DICT) { - assert(cframe.use_tracing == 0); - PyObject *sub = TOP(); - PyObject *dict = SECOND(); - PyObject *value = THIRD(); - DEOPT_IF(!PyDict_CheckExact(dict), STORE_SUBSCR); - STACK_SHRINK(3); - STAT_INC(STORE_SUBSCR, hit); - int err = _PyDict_SetItem_Take2((PyDictObject *)dict, sub, value); - Py_DECREF(dict); - if (err != 0) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR); - DISPATCH(); - } - - TARGET(DELETE_SUBSCR) { - PyObject *sub = TOP(); - PyObject *container = SECOND(); - int err; - STACK_SHRINK(2); - /* del container[sub] */ - err = PyObject_DelItem(container, sub); - Py_DECREF(container); - Py_DECREF(sub); - if (err != 0) - goto error; - DISPATCH(); - } - - TARGET(PRINT_EXPR) { - PyObject *value = POP(); - PyObject *hook = _PySys_GetAttr(tstate, &_Py_ID(displayhook)); - PyObject *res; - if (hook == NULL) { - _PyErr_SetString(tstate, PyExc_RuntimeError, - "lost sys.displayhook"); - Py_DECREF(value); - goto error; - } - res = PyObject_CallOneArg(hook, value); - Py_DECREF(value); - if (res == NULL) - goto error; - Py_DECREF(res); - DISPATCH(); - } - - TARGET(RAISE_VARARGS) { - PyObject *cause = NULL, *exc = NULL; - switch (oparg) { - case 2: - cause = POP(); /* cause */ - /* fall through */ - case 1: - exc = POP(); /* exc */ - /* fall through */ - case 0: - if (do_raise(tstate, exc, cause)) { - goto exception_unwind; - } - break; - default: - _PyErr_SetString(tstate, PyExc_SystemError, - "bad RAISE_VARARGS oparg"); - break; - } - goto error; - } - - TARGET(RETURN_VALUE) { - PyObject *retval = POP(); - assert(EMPTY()); - _PyFrame_SetStackPointer(frame, stack_pointer); - TRACE_FUNCTION_EXIT(); - DTRACE_FUNCTION_EXIT(); - _Py_LeaveRecursiveCallPy(tstate); - if (!frame->is_entry) { - frame = cframe.current_frame = pop_frame(tstate, frame); - _PyFrame_StackPush(frame, retval); - goto resume_frame; - } - _Py_LeaveRecursiveCallTstate(tstate); - /* Restore previous cframe and return. 
*/ - tstate->cframe = cframe.previous; - tstate->cframe->use_tracing = cframe.use_tracing; - assert(tstate->cframe->current_frame == frame->previous); - assert(!_PyErr_Occurred(tstate)); - return retval; - } - - TARGET(GET_AITER) { - unaryfunc getter = NULL; - PyObject *iter = NULL; - PyObject *obj = TOP(); - PyTypeObject *type = Py_TYPE(obj); - - if (type->tp_as_async != NULL) { - getter = type->tp_as_async->am_aiter; - } - - if (getter != NULL) { - iter = (*getter)(obj); - Py_DECREF(obj); - if (iter == NULL) { - SET_TOP(NULL); - goto error; - } - } - else { - SET_TOP(NULL); - _PyErr_Format(tstate, PyExc_TypeError, - "'async for' requires an object with " - "__aiter__ method, got %.100s", - type->tp_name); - Py_DECREF(obj); - goto error; - } - - if (Py_TYPE(iter)->tp_as_async == NULL || - Py_TYPE(iter)->tp_as_async->am_anext == NULL) { - - SET_TOP(NULL); - _PyErr_Format(tstate, PyExc_TypeError, - "'async for' received an object from __aiter__ " - "that does not implement __anext__: %.100s", - Py_TYPE(iter)->tp_name); - Py_DECREF(iter); - goto error; - } - - SET_TOP(iter); - DISPATCH(); - } - - TARGET(GET_ANEXT) { - unaryfunc getter = NULL; - PyObject *next_iter = NULL; - PyObject *awaitable = NULL; - PyObject *aiter = TOP(); - PyTypeObject *type = Py_TYPE(aiter); - - if (PyAsyncGen_CheckExact(aiter)) { - awaitable = type->tp_as_async->am_anext(aiter); - if (awaitable == NULL) { - goto error; - } - } else { - if (type->tp_as_async != NULL){ - getter = type->tp_as_async->am_anext; - } - - if (getter != NULL) { - next_iter = (*getter)(aiter); - if (next_iter == NULL) { - goto error; - } - } - else { - _PyErr_Format(tstate, PyExc_TypeError, - "'async for' requires an iterator with " - "__anext__ method, got %.100s", - type->tp_name); - goto error; - } - - awaitable = _PyCoro_GetAwaitableIter(next_iter); - if (awaitable == NULL) { - _PyErr_FormatFromCause( - PyExc_TypeError, - "'async for' received an invalid object " - "from __anext__: %.100s", - Py_TYPE(next_iter)->tp_name); - - Py_DECREF(next_iter); - goto error; - } else { - Py_DECREF(next_iter); - } - } - - PUSH(awaitable); - PREDICT(LOAD_CONST); - DISPATCH(); - } - - TARGET(GET_AWAITABLE) { - PREDICTED(GET_AWAITABLE); - PyObject *iterable = TOP(); - PyObject *iter = _PyCoro_GetAwaitableIter(iterable); - - if (iter == NULL) { - format_awaitable_error(tstate, Py_TYPE(iterable), oparg); - } - - Py_DECREF(iterable); - - if (iter != NULL && PyCoro_CheckExact(iter)) { - PyObject *yf = _PyGen_yf((PyGenObject*)iter); - if (yf != NULL) { - /* `iter` is a coroutine object that is being - awaited, `yf` is a pointer to the current awaitable - being awaited on. */ - Py_DECREF(yf); - Py_CLEAR(iter); - _PyErr_SetString(tstate, PyExc_RuntimeError, - "coroutine is being awaited already"); - /* The code below jumps to `error` if `iter` is NULL. 
*/ - } - } - - SET_TOP(iter); /* Even if it's NULL */ - - if (iter == NULL) { - goto error; - } - - PREDICT(LOAD_CONST); - DISPATCH(); - } - - TARGET(SEND) { - assert(frame->is_entry); - assert(STACK_LEVEL() >= 2); - PyObject *v = POP(); - PyObject *receiver = TOP(); - PySendResult gen_status; - PyObject *retval; - if (tstate->c_tracefunc == NULL) { - gen_status = PyIter_Send(receiver, v, &retval); - } else { - if (Py_IsNone(v) && PyIter_Check(receiver)) { - retval = Py_TYPE(receiver)->tp_iternext(receiver); - } - else { - retval = PyObject_CallMethodOneArg(receiver, &_Py_ID(send), v); - } - if (retval == NULL) { - if (tstate->c_tracefunc != NULL - && _PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) - call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj, tstate, frame); - if (_PyGen_FetchStopIterationValue(&retval) == 0) { - gen_status = PYGEN_RETURN; - } - else { - gen_status = PYGEN_ERROR; - } - } - else { - gen_status = PYGEN_NEXT; - } - } - Py_DECREF(v); - if (gen_status == PYGEN_ERROR) { - assert(retval == NULL); - goto error; - } - if (gen_status == PYGEN_RETURN) { - assert(retval != NULL); - Py_DECREF(receiver); - SET_TOP(retval); - JUMPBY(oparg); - DISPATCH(); - } - assert(gen_status == PYGEN_NEXT); - assert(retval != NULL); - PUSH(retval); - DISPATCH(); - } - - TARGET(ASYNC_GEN_WRAP) { - PyObject *v = TOP(); - assert(frame->f_code->co_flags & CO_ASYNC_GENERATOR); - PyObject *w = _PyAsyncGenValueWrapperNew(v); - if (w == NULL) { - goto error; - } - SET_TOP(w); - Py_DECREF(v); - DISPATCH(); - } - - TARGET(YIELD_VALUE) { - // NOTE: It's important that YIELD_VALUE never raises an exception! - // The compiler treats any exception raised here as a failed close() - // or throw() call. - assert(oparg == STACK_LEVEL()); - assert(frame->is_entry); - PyObject *retval = POP(); - _PyFrame_GetGenerator(frame)->gi_frame_state = FRAME_SUSPENDED; - _PyFrame_SetStackPointer(frame, stack_pointer); - TRACE_FUNCTION_EXIT(); - DTRACE_FUNCTION_EXIT(); - _Py_LeaveRecursiveCallPy(tstate); - _Py_LeaveRecursiveCallTstate(tstate); - /* Restore previous cframe and return. 
*/ - tstate->cframe = cframe.previous; - tstate->cframe->use_tracing = cframe.use_tracing; - assert(tstate->cframe->current_frame == frame->previous); - assert(!_PyErr_Occurred(tstate)); - return retval; - } - - TARGET(POP_EXCEPT) { - _PyErr_StackItem *exc_info = tstate->exc_info; - PyObject *value = exc_info->exc_value; - exc_info->exc_value = POP(); - Py_XDECREF(value); - DISPATCH(); - } - - TARGET(RERAISE) { - if (oparg) { - PyObject *lasti = PEEK(oparg + 1); - if (PyLong_Check(lasti)) { - frame->prev_instr = first_instr + PyLong_AsLong(lasti); - assert(!_PyErr_Occurred(tstate)); - } - else { - assert(PyLong_Check(lasti)); - _PyErr_SetString(tstate, PyExc_SystemError, "lasti is not an int"); - goto error; - } - } - PyObject *val = POP(); - assert(val && PyExceptionInstance_Check(val)); - PyObject *exc = Py_NewRef(PyExceptionInstance_Class(val)); - PyObject *tb = PyException_GetTraceback(val); - _PyErr_Restore(tstate, exc, val, tb); - goto exception_unwind; - } - - TARGET(PREP_RERAISE_STAR) { - PyObject *excs = POP(); - assert(PyList_Check(excs)); - PyObject *orig = POP(); - - PyObject *val = _PyExc_PrepReraiseStar(orig, excs); - Py_DECREF(excs); - Py_DECREF(orig); - - if (val == NULL) { - goto error; - } - - PUSH(val); - DISPATCH(); - } - - TARGET(END_ASYNC_FOR) { - PyObject *val = POP(); - assert(val && PyExceptionInstance_Check(val)); - if (PyErr_GivenExceptionMatches(val, PyExc_StopAsyncIteration)) { - Py_DECREF(val); - Py_DECREF(POP()); - DISPATCH(); - } - else { - PyObject *exc = Py_NewRef(PyExceptionInstance_Class(val)); - PyObject *tb = PyException_GetTraceback(val); - _PyErr_Restore(tstate, exc, val, tb); - goto exception_unwind; - } - } - - TARGET(CLEANUP_THROW) { - assert(throwflag); - PyObject *exc_value = TOP(); - assert(exc_value && PyExceptionInstance_Check(exc_value)); - if (PyErr_GivenExceptionMatches(exc_value, PyExc_StopIteration)) { - PyObject *value = ((PyStopIterationObject *)exc_value)->value; - Py_INCREF(value); - Py_DECREF(POP()); // The StopIteration. - Py_DECREF(POP()); // The last sent value. - Py_DECREF(POP()); // The delegated sub-iterator. 
- PUSH(value); - DISPATCH(); - } - Py_INCREF(exc_value); - PyObject *exc_type = Py_NewRef(Py_TYPE(exc_value)); - PyObject *exc_traceback = PyException_GetTraceback(exc_value); - _PyErr_Restore(tstate, exc_type, exc_value, exc_traceback); - goto exception_unwind; - } - - TARGET(LOAD_ASSERTION_ERROR) { - PyObject *value = PyExc_AssertionError; - Py_INCREF(value); - PUSH(value); - DISPATCH(); - } - - TARGET(LOAD_BUILD_CLASS) { - PyObject *bc; - if (PyDict_CheckExact(BUILTINS())) { - bc = _PyDict_GetItemWithError(BUILTINS(), - &_Py_ID(__build_class__)); - if (bc == NULL) { - if (!_PyErr_Occurred(tstate)) { - _PyErr_SetString(tstate, PyExc_NameError, - "__build_class__ not found"); - } - goto error; - } - Py_INCREF(bc); - } - else { - bc = PyObject_GetItem(BUILTINS(), &_Py_ID(__build_class__)); - if (bc == NULL) { - if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) - _PyErr_SetString(tstate, PyExc_NameError, - "__build_class__ not found"); - goto error; - } - } - PUSH(bc); - DISPATCH(); - } - - TARGET(STORE_NAME) { - PyObject *name = GETITEM(names, oparg); - PyObject *v = POP(); - PyObject *ns = LOCALS(); - int err; - if (ns == NULL) { - _PyErr_Format(tstate, PyExc_SystemError, - "no locals found when storing %R", name); - Py_DECREF(v); - goto error; - } - if (PyDict_CheckExact(ns)) - err = PyDict_SetItem(ns, name, v); - else - err = PyObject_SetItem(ns, name, v); - Py_DECREF(v); - if (err != 0) - goto error; - DISPATCH(); - } - - TARGET(DELETE_NAME) { - PyObject *name = GETITEM(names, oparg); - PyObject *ns = LOCALS(); - int err; - if (ns == NULL) { - _PyErr_Format(tstate, PyExc_SystemError, - "no locals when deleting %R", name); - goto error; - } - err = PyObject_DelItem(ns, name); - if (err != 0) { - format_exc_check_arg(tstate, PyExc_NameError, - NAME_ERROR_MSG, - name); - goto error; - } - DISPATCH(); - } - - TARGET(UNPACK_SEQUENCE) { - PREDICTED(UNPACK_SEQUENCE); - PyObject *seq = POP(); - PyObject **top = stack_pointer + oparg; - if (!unpack_iterable(tstate, seq, oparg, -1, top)) { - Py_DECREF(seq); - goto error; - } - STACK_GROW(oparg); - Py_DECREF(seq); - JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); - DISPATCH(); - } - - TARGET(UNPACK_SEQUENCE_ADAPTIVE) { - assert(cframe.use_tracing == 0); - _PyUnpackSequenceCache *cache = (_PyUnpackSequenceCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache)) { - PyObject *seq = TOP(); - next_instr--; - _Py_Specialize_UnpackSequence(seq, next_instr, oparg); - DISPATCH_SAME_OPARG(); - } - else { - STAT_INC(UNPACK_SEQUENCE, deferred); - DECREMENT_ADAPTIVE_COUNTER(cache); - GO_TO_INSTRUCTION(UNPACK_SEQUENCE); - } - } - - TARGET(UNPACK_SEQUENCE_TWO_TUPLE) { - PyObject *seq = TOP(); - DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); - DEOPT_IF(PyTuple_GET_SIZE(seq) != 2, UNPACK_SEQUENCE); - STAT_INC(UNPACK_SEQUENCE, hit); - SET_TOP(Py_NewRef(PyTuple_GET_ITEM(seq, 1))); - PUSH(Py_NewRef(PyTuple_GET_ITEM(seq, 0))); - Py_DECREF(seq); - JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); - DISPATCH(); - } - - TARGET(UNPACK_SEQUENCE_TUPLE) { - PyObject *seq = TOP(); - DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); - DEOPT_IF(PyTuple_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE); - STAT_INC(UNPACK_SEQUENCE, hit); - STACK_SHRINK(1); - PyObject **items = _PyTuple_ITEMS(seq); - while (oparg--) { - PUSH(Py_NewRef(items[oparg])); - } - Py_DECREF(seq); - JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); - DISPATCH(); - } - - TARGET(UNPACK_SEQUENCE_LIST) { - PyObject *seq = TOP(); - DEOPT_IF(!PyList_CheckExact(seq), UNPACK_SEQUENCE); - DEOPT_IF(PyList_GET_SIZE(seq) 
!= oparg, UNPACK_SEQUENCE); - STAT_INC(UNPACK_SEQUENCE, hit); - STACK_SHRINK(1); - PyObject **items = _PyList_ITEMS(seq); - while (oparg--) { - PUSH(Py_NewRef(items[oparg])); - } - Py_DECREF(seq); - JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); - DISPATCH(); - } - - TARGET(UNPACK_EX) { - int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8); - PyObject *seq = POP(); - PyObject **top = stack_pointer + totalargs; - if (!unpack_iterable(tstate, seq, oparg & 0xFF, oparg >> 8, top)) { - Py_DECREF(seq); - goto error; - } - STACK_GROW(totalargs); - Py_DECREF(seq); - DISPATCH(); - } - - TARGET(STORE_ATTR) { - PREDICTED(STORE_ATTR); - PyObject *name = GETITEM(names, oparg); - PyObject *owner = TOP(); - PyObject *v = SECOND(); - int err; - STACK_SHRINK(2); - err = PyObject_SetAttr(owner, name, v); - Py_DECREF(v); - Py_DECREF(owner); - if (err != 0) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR); - DISPATCH(); - } - - TARGET(DELETE_ATTR) { - PyObject *name = GETITEM(names, oparg); - PyObject *owner = POP(); - int err; - err = PyObject_SetAttr(owner, name, (PyObject *)NULL); - Py_DECREF(owner); - if (err != 0) - goto error; - DISPATCH(); - } - - TARGET(STORE_GLOBAL) { - PyObject *name = GETITEM(names, oparg); - PyObject *v = POP(); - int err; - err = PyDict_SetItem(GLOBALS(), name, v); - Py_DECREF(v); - if (err != 0) - goto error; - DISPATCH(); - } - - TARGET(DELETE_GLOBAL) { - PyObject *name = GETITEM(names, oparg); - int err; - err = PyDict_DelItem(GLOBALS(), name); - if (err != 0) { - if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { - format_exc_check_arg(tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - } - goto error; - } - DISPATCH(); - } - - TARGET(LOAD_NAME) { - PyObject *name = GETITEM(names, oparg); - PyObject *locals = LOCALS(); - PyObject *v; - if (locals == NULL) { - _PyErr_Format(tstate, PyExc_SystemError, - "no locals when loading %R", name); - goto error; - } - if (PyDict_CheckExact(locals)) { - v = PyDict_GetItemWithError(locals, name); - if (v != NULL) { - Py_INCREF(v); - } - else if (_PyErr_Occurred(tstate)) { - goto error; - } - } - else { - v = PyObject_GetItem(locals, name); - if (v == NULL) { - if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) - goto error; - _PyErr_Clear(tstate); - } - } - if (v == NULL) { - v = PyDict_GetItemWithError(GLOBALS(), name); - if (v != NULL) { - Py_INCREF(v); - } - else if (_PyErr_Occurred(tstate)) { - goto error; - } - else { - if (PyDict_CheckExact(BUILTINS())) { - v = PyDict_GetItemWithError(BUILTINS(), name); - if (v == NULL) { - if (!_PyErr_Occurred(tstate)) { - format_exc_check_arg( - tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - } - goto error; - } - Py_INCREF(v); - } - else { - v = PyObject_GetItem(BUILTINS(), name); - if (v == NULL) { - if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { - format_exc_check_arg( - tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - } - goto error; - } - } - } - } - PUSH(v); - DISPATCH(); - } - - TARGET(LOAD_GLOBAL) { - PREDICTED(LOAD_GLOBAL); - int push_null = oparg & 1; - PEEK(0) = NULL; - PyObject *name = GETITEM(names, oparg>>1); - PyObject *v; - if (PyDict_CheckExact(GLOBALS()) - && PyDict_CheckExact(BUILTINS())) - { - v = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(), - (PyDictObject *)BUILTINS(), - name); - if (v == NULL) { - if (!_PyErr_Occurred(tstate)) { - /* _PyDict_LoadGlobal() returns NULL without raising - * an exception if the key doesn't exist */ - format_exc_check_arg(tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - } - goto error; - } - Py_INCREF(v); - } - 
else { - /* Slow-path if globals or builtins is not a dict */ - - /* namespace 1: globals */ - v = PyObject_GetItem(GLOBALS(), name); - if (v == NULL) { - if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { - goto error; - } - _PyErr_Clear(tstate); - - /* namespace 2: builtins */ - v = PyObject_GetItem(BUILTINS(), name); - if (v == NULL) { - if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { - format_exc_check_arg( - tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - } - goto error; - } - } - } - /* Skip over inline cache */ - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_GLOBAL); - STACK_GROW(push_null); - PUSH(v); - DISPATCH(); - } - - TARGET(LOAD_GLOBAL_ADAPTIVE) { - assert(cframe.use_tracing == 0); - _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache)) { - PyObject *name = GETITEM(names, oparg>>1); - next_instr--; - if (_Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name) < 0) { - goto error; - } - DISPATCH_SAME_OPARG(); - } - else { - STAT_INC(LOAD_GLOBAL, deferred); - DECREMENT_ADAPTIVE_COUNTER(cache); - GO_TO_INSTRUCTION(LOAD_GLOBAL); - } - } - - TARGET(LOAD_GLOBAL_MODULE) { - assert(cframe.use_tracing == 0); - DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL); - PyDictObject *dict = (PyDictObject *)GLOBALS(); - _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; - uint32_t version = read_u32(cache->module_keys_version); - DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL); - assert(DK_IS_UNICODE(dict->ma_keys)); - PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(dict->ma_keys); - PyObject *res = entries[cache->index].me_value; - DEOPT_IF(res == NULL, LOAD_GLOBAL); - int push_null = oparg & 1; - PEEK(0) = NULL; - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_GLOBAL); - STAT_INC(LOAD_GLOBAL, hit); - STACK_GROW(push_null+1); - Py_INCREF(res); - SET_TOP(res); - DISPATCH(); - } - - TARGET(LOAD_GLOBAL_BUILTIN) { - assert(cframe.use_tracing == 0); - DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL); - DEOPT_IF(!PyDict_CheckExact(BUILTINS()), LOAD_GLOBAL); - PyDictObject *mdict = (PyDictObject *)GLOBALS(); - PyDictObject *bdict = (PyDictObject *)BUILTINS(); - _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; - uint32_t mod_version = read_u32(cache->module_keys_version); - uint16_t bltn_version = cache->builtin_keys_version; - DEOPT_IF(mdict->ma_keys->dk_version != mod_version, LOAD_GLOBAL); - DEOPT_IF(bdict->ma_keys->dk_version != bltn_version, LOAD_GLOBAL); - assert(DK_IS_UNICODE(bdict->ma_keys)); - PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(bdict->ma_keys); - PyObject *res = entries[cache->index].me_value; - DEOPT_IF(res == NULL, LOAD_GLOBAL); - int push_null = oparg & 1; - PEEK(0) = NULL; - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_GLOBAL); - STAT_INC(LOAD_GLOBAL, hit); - STACK_GROW(push_null+1); - Py_INCREF(res); - SET_TOP(res); - DISPATCH(); - } - - TARGET(DELETE_FAST) { - PyObject *v = GETLOCAL(oparg); - if (v != NULL) { - SETLOCAL(oparg, NULL); - DISPATCH(); - } - goto unbound_local_error; - } - - TARGET(MAKE_CELL) { - // "initial" is probably NULL but not if it's an arg (or set - // via PyFrame_LocalsToFast() before MAKE_CELL has run). 
- PyObject *initial = GETLOCAL(oparg); - PyObject *cell = PyCell_New(initial); - if (cell == NULL) { - goto resume_with_error; - } - SETLOCAL(oparg, cell); - DISPATCH(); - } - - TARGET(DELETE_DEREF) { - PyObject *cell = GETLOCAL(oparg); - PyObject *oldobj = PyCell_GET(cell); - if (oldobj != NULL) { - PyCell_SET(cell, NULL); - Py_DECREF(oldobj); - DISPATCH(); - } - format_exc_unbound(tstate, frame->f_code, oparg); - goto error; - } - - TARGET(LOAD_CLASSDEREF) { - PyObject *name, *value, *locals = LOCALS(); - assert(locals); - assert(oparg >= 0 && oparg < frame->f_code->co_nlocalsplus); - name = PyTuple_GET_ITEM(frame->f_code->co_localsplusnames, oparg); - if (PyDict_CheckExact(locals)) { - value = PyDict_GetItemWithError(locals, name); - if (value != NULL) { - Py_INCREF(value); - } - else if (_PyErr_Occurred(tstate)) { - goto error; - } - } - else { - value = PyObject_GetItem(locals, name); - if (value == NULL) { - if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { - goto error; - } - _PyErr_Clear(tstate); - } - } - if (!value) { - PyObject *cell = GETLOCAL(oparg); - value = PyCell_GET(cell); - if (value == NULL) { - format_exc_unbound(tstate, frame->f_code, oparg); - goto error; - } - Py_INCREF(value); - } - PUSH(value); - DISPATCH(); - } - - TARGET(LOAD_DEREF) { - PyObject *cell = GETLOCAL(oparg); - PyObject *value = PyCell_GET(cell); - if (value == NULL) { - format_exc_unbound(tstate, frame->f_code, oparg); - goto error; - } - Py_INCREF(value); - PUSH(value); - DISPATCH(); - } - - TARGET(STORE_DEREF) { - PyObject *v = POP(); - PyObject *cell = GETLOCAL(oparg); - PyObject *oldobj = PyCell_GET(cell); - PyCell_SET(cell, v); - Py_XDECREF(oldobj); - DISPATCH(); - } - - TARGET(COPY_FREE_VARS) { - /* Copy closure variables to free variables */ - PyCodeObject *co = frame->f_code; - assert(PyFunction_Check(frame->f_funcobj)); - PyObject *closure = ((PyFunctionObject *)frame->f_funcobj)->func_closure; - int offset = co->co_nlocals + co->co_nplaincellvars; - assert(oparg == co->co_nfreevars); - for (int i = 0; i < oparg; ++i) { - PyObject *o = PyTuple_GET_ITEM(closure, i); - Py_INCREF(o); - frame->localsplus[offset + i] = o; - } - DISPATCH(); - } - - TARGET(BUILD_STRING) { - PyObject *str; - str = _PyUnicode_JoinArray(&_Py_STR(empty), - stack_pointer - oparg, oparg); - if (str == NULL) - goto error; - while (--oparg >= 0) { - PyObject *item = POP(); - Py_DECREF(item); - } - PUSH(str); - DISPATCH(); - } - - TARGET(BUILD_TUPLE) { - STACK_SHRINK(oparg); - PyObject *tup = _PyTuple_FromArraySteal(stack_pointer, oparg); - if (tup == NULL) - goto error; - PUSH(tup); - DISPATCH(); - } - - TARGET(BUILD_LIST) { - PyObject *list = PyList_New(oparg); - if (list == NULL) - goto error; - while (--oparg >= 0) { - PyObject *item = POP(); - PyList_SET_ITEM(list, oparg, item); - } - PUSH(list); - DISPATCH(); - } - - TARGET(LIST_TO_TUPLE) { - PyObject *list = POP(); - PyObject *tuple = PyList_AsTuple(list); - Py_DECREF(list); - if (tuple == NULL) { - goto error; - } - PUSH(tuple); - DISPATCH(); - } - - TARGET(LIST_EXTEND) { - PyObject *iterable = POP(); - PyObject *list = PEEK(oparg); - PyObject *none_val = _PyList_Extend((PyListObject *)list, iterable); - if (none_val == NULL) { - if (_PyErr_ExceptionMatches(tstate, PyExc_TypeError) && - (Py_TYPE(iterable)->tp_iter == NULL && !PySequence_Check(iterable))) - { - _PyErr_Clear(tstate); - _PyErr_Format(tstate, PyExc_TypeError, - "Value after * must be an iterable, not %.200s", - Py_TYPE(iterable)->tp_name); - } - Py_DECREF(iterable); - goto error; - } - 
Py_DECREF(none_val); - Py_DECREF(iterable); - DISPATCH(); - } - - TARGET(SET_UPDATE) { - PyObject *iterable = POP(); - PyObject *set = PEEK(oparg); - int err = _PySet_Update(set, iterable); - Py_DECREF(iterable); - if (err < 0) { - goto error; - } - DISPATCH(); - } - - TARGET(BUILD_SET) { - PyObject *set = PySet_New(NULL); - int err = 0; - int i; - if (set == NULL) - goto error; - for (i = oparg; i > 0; i--) { - PyObject *item = PEEK(i); - if (err == 0) - err = PySet_Add(set, item); - Py_DECREF(item); - } - STACK_SHRINK(oparg); - if (err != 0) { - Py_DECREF(set); - goto error; - } - PUSH(set); - DISPATCH(); - } - - TARGET(BUILD_MAP) { - PyObject *map = _PyDict_FromItems( - &PEEK(2*oparg), 2, - &PEEK(2*oparg - 1), 2, - oparg); - if (map == NULL) - goto error; - - while (oparg--) { - Py_DECREF(POP()); - Py_DECREF(POP()); - } - PUSH(map); - DISPATCH(); - } - - TARGET(SETUP_ANNOTATIONS) { - int err; - PyObject *ann_dict; - if (LOCALS() == NULL) { - _PyErr_Format(tstate, PyExc_SystemError, - "no locals found when setting up annotations"); - goto error; - } - /* check if __annotations__ in locals()... */ - if (PyDict_CheckExact(LOCALS())) { - ann_dict = _PyDict_GetItemWithError(LOCALS(), - &_Py_ID(__annotations__)); - if (ann_dict == NULL) { - if (_PyErr_Occurred(tstate)) { - goto error; - } - /* ...if not, create a new one */ - ann_dict = PyDict_New(); - if (ann_dict == NULL) { - goto error; - } - err = PyDict_SetItem(LOCALS(), &_Py_ID(__annotations__), - ann_dict); - Py_DECREF(ann_dict); - if (err != 0) { - goto error; - } - } - } - else { - /* do the same if locals() is not a dict */ - ann_dict = PyObject_GetItem(LOCALS(), &_Py_ID(__annotations__)); - if (ann_dict == NULL) { - if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { - goto error; - } - _PyErr_Clear(tstate); - ann_dict = PyDict_New(); - if (ann_dict == NULL) { - goto error; - } - err = PyObject_SetItem(LOCALS(), &_Py_ID(__annotations__), - ann_dict); - Py_DECREF(ann_dict); - if (err != 0) { - goto error; - } - } - else { - Py_DECREF(ann_dict); - } - } - DISPATCH(); - } - - TARGET(BUILD_CONST_KEY_MAP) { - PyObject *map; - PyObject *keys = TOP(); - if (!PyTuple_CheckExact(keys) || - PyTuple_GET_SIZE(keys) != (Py_ssize_t)oparg) { - _PyErr_SetString(tstate, PyExc_SystemError, - "bad BUILD_CONST_KEY_MAP keys argument"); - goto error; - } - map = _PyDict_FromItems( - &PyTuple_GET_ITEM(keys, 0), 1, - &PEEK(oparg + 1), 1, oparg); - if (map == NULL) { - goto error; - } - - Py_DECREF(POP()); - while (oparg--) { - Py_DECREF(POP()); - } - PUSH(map); - DISPATCH(); - } - - TARGET(DICT_UPDATE) { - PyObject *update = POP(); - PyObject *dict = PEEK(oparg); - if (PyDict_Update(dict, update) < 0) { - if (_PyErr_ExceptionMatches(tstate, PyExc_AttributeError)) { - _PyErr_Format(tstate, PyExc_TypeError, - "'%.200s' object is not a mapping", - Py_TYPE(update)->tp_name); - } - Py_DECREF(update); - goto error; - } - Py_DECREF(update); - DISPATCH(); - } - - TARGET(DICT_MERGE) { - PyObject *update = POP(); - PyObject *dict = PEEK(oparg); - - if (_PyDict_MergeEx(dict, update, 2) < 0) { - format_kwargs_error(tstate, PEEK(2 + oparg), update); - Py_DECREF(update); - goto error; - } - Py_DECREF(update); - PREDICT(CALL_FUNCTION_EX); - DISPATCH(); - } - - TARGET(MAP_ADD) { - PyObject *value = TOP(); - PyObject *key = SECOND(); - PyObject *map; - STACK_SHRINK(2); - map = PEEK(oparg); /* dict */ - assert(PyDict_CheckExact(map)); - /* map[key] = value */ - if (_PyDict_SetItem_Take2((PyDictObject *)map, key, value) != 0) { - goto error; - } - 
PREDICT(JUMP_BACKWARD_QUICK); - DISPATCH(); - } - - TARGET(LOAD_ATTR) { - PREDICTED(LOAD_ATTR); - PyObject *name = GETITEM(names, oparg >> 1); - PyObject *owner = TOP(); - if (oparg & 1) { - /* Designed to work in tandem with CALL. */ - PyObject* meth = NULL; - - int meth_found = _PyObject_GetMethod(owner, name, &meth); - - if (meth == NULL) { - /* Most likely attribute wasn't found. */ - goto error; - } - - if (meth_found) { - /* We can bypass temporary bound method object. - meth is unbound method and obj is self. - - meth | self | arg1 | ... | argN - */ - SET_TOP(meth); - PUSH(owner); // self - } - else { - /* meth is not an unbound method (but a regular attr, or - something was returned by a descriptor protocol). Set - the second element of the stack to NULL, to signal - CALL that it's not a method call. - - NULL | meth | arg1 | ... | argN - */ - SET_TOP(NULL); - Py_DECREF(owner); - PUSH(meth); - } - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - PyObject *res = PyObject_GetAttr(owner, name); - if (res == NULL) { - goto error; - } - Py_DECREF(owner); - SET_TOP(res); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - - TARGET(LOAD_ATTR_ADAPTIVE) { - assert(cframe.use_tracing == 0); - _PyAttrCache *cache = (_PyAttrCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache)) { - PyObject *owner = TOP(); - PyObject *name = GETITEM(names, oparg>>1); - next_instr--; - if (_Py_Specialize_LoadAttr(owner, next_instr, name) < 0) { - goto error; - } - DISPATCH_SAME_OPARG(); - } - else { - STAT_INC(LOAD_ATTR, deferred); - DECREMENT_ADAPTIVE_COUNTER(cache); - GO_TO_INSTRUCTION(LOAD_ATTR); - } - } - - TARGET(LOAD_ATTR_INSTANCE_VALUE) { - assert(cframe.use_tracing == 0); - PyObject *owner = TOP(); - PyObject *res; - PyTypeObject *tp = Py_TYPE(owner); - _PyAttrCache *cache = (_PyAttrCache *)next_instr; - uint32_t type_version = read_u32(cache->version); - assert(type_version != 0); - DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); - assert(tp->tp_dictoffset < 0); - assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); - DEOPT_IF(!_PyDictOrValues_IsValues(dorv), LOAD_ATTR); - res = _PyDictOrValues_GetValues(dorv)->values[cache->index]; - DEOPT_IF(res == NULL, LOAD_ATTR); - STAT_INC(LOAD_ATTR, hit); - Py_INCREF(res); - SET_TOP(NULL); - STACK_GROW((oparg & 1)); - SET_TOP(res); - Py_DECREF(owner); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - - TARGET(LOAD_ATTR_MODULE) { - assert(cframe.use_tracing == 0); - PyObject *owner = TOP(); - PyObject *res; - _PyAttrCache *cache = (_PyAttrCache *)next_instr; - DEOPT_IF(!PyModule_CheckExact(owner), LOAD_ATTR); - PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner)->md_dict; - assert(dict != NULL); - DEOPT_IF(dict->ma_keys->dk_version != read_u32(cache->version), - LOAD_ATTR); - assert(dict->ma_keys->dk_kind == DICT_KEYS_UNICODE); - assert(cache->index < dict->ma_keys->dk_nentries); - PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + cache->index; - res = ep->me_value; - DEOPT_IF(res == NULL, LOAD_ATTR); - STAT_INC(LOAD_ATTR, hit); - Py_INCREF(res); - SET_TOP(NULL); - STACK_GROW((oparg & 1)); - SET_TOP(res); - Py_DECREF(owner); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - - TARGET(LOAD_ATTR_WITH_HINT) { - assert(cframe.use_tracing == 0); - PyObject *owner = TOP(); - PyObject *res; - PyTypeObject *tp = Py_TYPE(owner); - _PyAttrCache *cache = (_PyAttrCache *)next_instr; - uint32_t type_version = read_u32(cache->version); - 
assert(type_version != 0); - DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); - assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); - DEOPT_IF(_PyDictOrValues_IsValues(dorv), LOAD_ATTR); - PyDictObject *dict = (PyDictObject *)_PyDictOrValues_GetDict(dorv); - DEOPT_IF(dict == NULL, LOAD_ATTR); - assert(PyDict_CheckExact((PyObject *)dict)); - PyObject *name = GETITEM(names, oparg>>1); - uint16_t hint = cache->index; - DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, LOAD_ATTR); - if (DK_IS_UNICODE(dict->ma_keys)) { - PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint; - DEOPT_IF(ep->me_key != name, LOAD_ATTR); - res = ep->me_value; - } - else { - PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint; - DEOPT_IF(ep->me_key != name, LOAD_ATTR); - res = ep->me_value; - } - DEOPT_IF(res == NULL, LOAD_ATTR); - STAT_INC(LOAD_ATTR, hit); - Py_INCREF(res); - SET_TOP(NULL); - STACK_GROW((oparg & 1)); - SET_TOP(res); - Py_DECREF(owner); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - - TARGET(LOAD_ATTR_SLOT) { - assert(cframe.use_tracing == 0); - PyObject *owner = TOP(); - PyObject *res; - PyTypeObject *tp = Py_TYPE(owner); - _PyAttrCache *cache = (_PyAttrCache *)next_instr; - uint32_t type_version = read_u32(cache->version); - assert(type_version != 0); - DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); - char *addr = (char *)owner + cache->index; - res = *(PyObject **)addr; - DEOPT_IF(res == NULL, LOAD_ATTR); - STAT_INC(LOAD_ATTR, hit); - Py_INCREF(res); - SET_TOP(NULL); - STACK_GROW((oparg & 1)); - SET_TOP(res); - Py_DECREF(owner); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - - TARGET(LOAD_ATTR_CLASS) { - assert(cframe.use_tracing == 0); - _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; - - PyObject *cls = TOP(); - DEOPT_IF(!PyType_Check(cls), LOAD_ATTR); - uint32_t type_version = read_u32(cache->type_version); - DEOPT_IF(((PyTypeObject *)cls)->tp_version_tag != type_version, - LOAD_ATTR); - assert(type_version != 0); - - STAT_INC(LOAD_ATTR, hit); - PyObject *res = read_obj(cache->descr); - assert(res != NULL); - Py_INCREF(res); - SET_TOP(NULL); - STACK_GROW((oparg & 1)); - SET_TOP(res); - Py_DECREF(cls); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - - TARGET(LOAD_ATTR_PROPERTY) { - assert(cframe.use_tracing == 0); - DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); - _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; - - PyObject *owner = TOP(); - PyTypeObject *cls = Py_TYPE(owner); - uint32_t type_version = read_u32(cache->type_version); - DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR); - assert(type_version != 0); - PyObject *fget = read_obj(cache->descr); - assert(Py_IS_TYPE(fget, &PyFunction_Type)); - PyFunctionObject *f = (PyFunctionObject *)fget; - uint32_t func_version = read_u32(cache->keys_version); - assert(func_version != 0); - DEOPT_IF(f->func_version != func_version, LOAD_ATTR); - PyCodeObject *code = (PyCodeObject *)f->func_code; - assert(code->co_argcount == 1); - DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), LOAD_ATTR); - STAT_INC(LOAD_ATTR, hit); - Py_INCREF(fget); - _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f); - SET_TOP(NULL); - int shrink_stack = !(oparg & 1); - STACK_SHRINK(shrink_stack); - new_frame->localsplus[0] = owner; - for (int i = 1; i < code->co_nlocalsplus; i++) { - new_frame->localsplus[i] = NULL; - } - _PyFrame_SetStackPointer(frame, stack_pointer); 
- JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - frame->prev_instr = next_instr - 1; - new_frame->previous = frame; - frame = cframe.current_frame = new_frame; - CALL_STAT_INC(inlined_py_calls); - goto start_frame; - } - - TARGET(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN) { - assert(cframe.use_tracing == 0); - DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); - _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; - PyObject *owner = TOP(); - PyTypeObject *cls = Py_TYPE(owner); - uint32_t type_version = read_u32(cache->type_version); - DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR); - assert(type_version != 0); - PyObject *getattribute = read_obj(cache->descr); - assert(Py_IS_TYPE(getattribute, &PyFunction_Type)); - PyFunctionObject *f = (PyFunctionObject *)getattribute; - uint32_t func_version = read_u32(cache->keys_version); - assert(func_version != 0); - DEOPT_IF(f->func_version != func_version, LOAD_ATTR); - PyCodeObject *code = (PyCodeObject *)f->func_code; - assert(code->co_argcount == 2); - DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), CALL); - STAT_INC(LOAD_ATTR, hit); - - PyObject *name = GETITEM(names, oparg >> 1); - Py_INCREF(f); - _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f); - SET_TOP(NULL); - int shrink_stack = !(oparg & 1); - STACK_SHRINK(shrink_stack); - Py_INCREF(name); - new_frame->localsplus[0] = owner; - new_frame->localsplus[1] = name; - for (int i = 2; i < code->co_nlocalsplus; i++) { - new_frame->localsplus[i] = NULL; - } - _PyFrame_SetStackPointer(frame, stack_pointer); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - frame->prev_instr = next_instr - 1; - new_frame->previous = frame; - frame = cframe.current_frame = new_frame; - CALL_STAT_INC(inlined_py_calls); - goto start_frame; - } - - TARGET(STORE_ATTR_ADAPTIVE) { - assert(cframe.use_tracing == 0); - _PyAttrCache *cache = (_PyAttrCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache)) { - PyObject *owner = TOP(); - PyObject *name = GETITEM(names, oparg); - next_instr--; - if (_Py_Specialize_StoreAttr(owner, next_instr, name) < 0) { - goto error; - } - DISPATCH_SAME_OPARG(); - } - else { - STAT_INC(STORE_ATTR, deferred); - DECREMENT_ADAPTIVE_COUNTER(cache); - GO_TO_INSTRUCTION(STORE_ATTR); - } - } - - TARGET(STORE_ATTR_INSTANCE_VALUE) { - assert(cframe.use_tracing == 0); - PyObject *owner = TOP(); - PyTypeObject *tp = Py_TYPE(owner); - _PyAttrCache *cache = (_PyAttrCache *)next_instr; - uint32_t type_version = read_u32(cache->version); - assert(type_version != 0); - DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); - assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); - DEOPT_IF(!_PyDictOrValues_IsValues(dorv), STORE_ATTR); - STAT_INC(STORE_ATTR, hit); - Py_ssize_t index = cache->index; - STACK_SHRINK(1); - PyObject *value = POP(); - PyDictValues *values = _PyDictOrValues_GetValues(dorv); - PyObject *old_value = values->values[index]; - values->values[index] = value; - if (old_value == NULL) { - _PyDictValues_AddToInsertionOrder(values, index); - } - else { - Py_DECREF(old_value); - } - Py_DECREF(owner); - JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR); - DISPATCH(); - } - - TARGET(STORE_ATTR_WITH_HINT) { - assert(cframe.use_tracing == 0); - PyObject *owner = TOP(); - PyTypeObject *tp = Py_TYPE(owner); - _PyAttrCache *cache = (_PyAttrCache *)next_instr; - uint32_t type_version = read_u32(cache->version); - assert(type_version != 0); - DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); - 
assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); - DEOPT_IF(_PyDictOrValues_IsValues(dorv), LOAD_ATTR); - PyDictObject *dict = (PyDictObject *)_PyDictOrValues_GetDict(dorv); - DEOPT_IF(dict == NULL, STORE_ATTR); - assert(PyDict_CheckExact((PyObject *)dict)); - PyObject *name = GETITEM(names, oparg); - uint16_t hint = cache->index; - DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, STORE_ATTR); - PyObject *value, *old_value; - uint64_t new_version; - if (DK_IS_UNICODE(dict->ma_keys)) { - PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint; - DEOPT_IF(ep->me_key != name, STORE_ATTR); - old_value = ep->me_value; - DEOPT_IF(old_value == NULL, STORE_ATTR); - STACK_SHRINK(1); - value = POP(); - new_version = _PyDict_NotifyEvent(PyDict_EVENT_MODIFIED, dict, name, value); - ep->me_value = value; - } - else { - PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint; - DEOPT_IF(ep->me_key != name, STORE_ATTR); - old_value = ep->me_value; - DEOPT_IF(old_value == NULL, STORE_ATTR); - STACK_SHRINK(1); - value = POP(); - new_version = _PyDict_NotifyEvent(PyDict_EVENT_MODIFIED, dict, name, value); - ep->me_value = value; - } - Py_DECREF(old_value); - STAT_INC(STORE_ATTR, hit); - /* Ensure dict is GC tracked if it needs to be */ - if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(value)) { - _PyObject_GC_TRACK(dict); - } - /* PEP 509 */ - dict->ma_version_tag = new_version; - Py_DECREF(owner); - JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR); - DISPATCH(); - } - - TARGET(STORE_ATTR_SLOT) { - assert(cframe.use_tracing == 0); - PyObject *owner = TOP(); - PyTypeObject *tp = Py_TYPE(owner); - _PyAttrCache *cache = (_PyAttrCache *)next_instr; - uint32_t type_version = read_u32(cache->version); - assert(type_version != 0); - DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); - char *addr = (char *)owner + cache->index; - STAT_INC(STORE_ATTR, hit); - STACK_SHRINK(1); - PyObject *value = POP(); - PyObject *old_value = *(PyObject **)addr; - *(PyObject **)addr = value; - Py_XDECREF(old_value); - Py_DECREF(owner); - JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR); - DISPATCH(); - } - - TARGET(COMPARE_OP) { - PREDICTED(COMPARE_OP); - assert(oparg <= Py_GE); - PyObject *right = POP(); - PyObject *left = TOP(); - PyObject *res = PyObject_RichCompare(left, right, oparg); - SET_TOP(res); - Py_DECREF(left); - Py_DECREF(right); - if (res == NULL) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP); - DISPATCH(); - } - - TARGET(COMPARE_OP_ADAPTIVE) { - assert(cframe.use_tracing == 0); - _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache)) { - PyObject *right = TOP(); - PyObject *left = SECOND(); - next_instr--; - _Py_Specialize_CompareOp(left, right, next_instr, oparg); - DISPATCH_SAME_OPARG(); - } - else { - STAT_INC(COMPARE_OP, deferred); - DECREMENT_ADAPTIVE_COUNTER(cache); - GO_TO_INSTRUCTION(COMPARE_OP); - } - } - - TARGET(COMPARE_OP_FLOAT_JUMP) { - assert(cframe.use_tracing == 0); - // Combined: COMPARE_OP (float ? 
float) + POP_JUMP_IF_(true/false) - _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; - int when_to_jump_mask = cache->mask; - PyObject *right = TOP(); - PyObject *left = SECOND(); - DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP); - DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP); - double dleft = PyFloat_AS_DOUBLE(left); - double dright = PyFloat_AS_DOUBLE(right); - int sign = (dleft > dright) - (dleft < dright); - DEOPT_IF(isnan(dleft), COMPARE_OP); - DEOPT_IF(isnan(dright), COMPARE_OP); - STAT_INC(COMPARE_OP, hit); - JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP); - NEXTOPARG(); - STACK_SHRINK(2); - _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); - _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); - assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE); - int jump = (1 << (sign + 1)) & when_to_jump_mask; - if (!jump) { - next_instr++; - } - else { - JUMPBY(1 + oparg); - } - DISPATCH(); - } - - TARGET(COMPARE_OP_INT_JUMP) { - assert(cframe.use_tracing == 0); - // Combined: COMPARE_OP (int ? int) + POP_JUMP_IF_(true/false) - _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; - int when_to_jump_mask = cache->mask; - PyObject *right = TOP(); - PyObject *left = SECOND(); - DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP); - DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP); - DEOPT_IF((size_t)(Py_SIZE(left) + 1) > 2, COMPARE_OP); - DEOPT_IF((size_t)(Py_SIZE(right) + 1) > 2, COMPARE_OP); - STAT_INC(COMPARE_OP, hit); - assert(Py_ABS(Py_SIZE(left)) <= 1 && Py_ABS(Py_SIZE(right)) <= 1); - Py_ssize_t ileft = Py_SIZE(left) * ((PyLongObject *)left)->ob_digit[0]; - Py_ssize_t iright = Py_SIZE(right) * ((PyLongObject *)right)->ob_digit[0]; - int sign = (ileft > iright) - (ileft < iright); - JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP); - NEXTOPARG(); - STACK_SHRINK(2); - _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); - _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); - assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE); - int jump = (1 << (sign + 1)) & when_to_jump_mask; - if (!jump) { - next_instr++; - } - else { - JUMPBY(1 + oparg); - } - DISPATCH(); - } - - TARGET(COMPARE_OP_STR_JUMP) { - assert(cframe.use_tracing == 0); - // Combined: COMPARE_OP (str == str or str != str) + POP_JUMP_IF_(true/false) - _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; - int invert = cache->mask; - PyObject *right = TOP(); - PyObject *left = SECOND(); - DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP); - DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP); - STAT_INC(COMPARE_OP, hit); - int res = _PyUnicode_Equal(left, right); - assert(oparg == Py_EQ || oparg == Py_NE); - JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP); - NEXTOPARG(); - assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE); - STACK_SHRINK(2); - _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc); - _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); - assert(res == 0 || res == 1); - assert(invert == 0 || invert == 1); - int jump = res ^ invert; - if (!jump) { - next_instr++; - } - else { - JUMPBY(1 + oparg); - } - DISPATCH(); - } - - TARGET(IS_OP) { - PyObject *right = POP(); - PyObject *left = TOP(); - int res = Py_Is(left, right) ^ oparg; - PyObject *b = res ? 
Py_True : Py_False; - Py_INCREF(b); - SET_TOP(b); - Py_DECREF(left); - Py_DECREF(right); - DISPATCH(); - } - - TARGET(CONTAINS_OP) { - PyObject *right = POP(); - PyObject *left = POP(); - int res = PySequence_Contains(right, left); - Py_DECREF(left); - Py_DECREF(right); - if (res < 0) { - goto error; - } - PyObject *b = (res^oparg) ? Py_True : Py_False; - Py_INCREF(b); - PUSH(b); - DISPATCH(); - } - - TARGET(CHECK_EG_MATCH) { - PyObject *match_type = POP(); - if (check_except_star_type_valid(tstate, match_type) < 0) { - Py_DECREF(match_type); - goto error; - } - - PyObject *exc_value = TOP(); - PyObject *match = NULL, *rest = NULL; - int res = exception_group_match(exc_value, match_type, - &match, &rest); - Py_DECREF(match_type); - if (res < 0) { - goto error; - } - - if (match == NULL || rest == NULL) { - assert(match == NULL); - assert(rest == NULL); - goto error; - } - if (Py_IsNone(match)) { - PUSH(match); - Py_XDECREF(rest); - } - else { - /* Total or partial match - update the stack from - * [val] - * to - * [rest, match] - * (rest can be Py_None) - */ - - SET_TOP(rest); - PUSH(match); - PyErr_SetExcInfo(NULL, Py_NewRef(match), NULL); - Py_DECREF(exc_value); - } - DISPATCH(); - } - - TARGET(CHECK_EXC_MATCH) { - PyObject *right = POP(); - PyObject *left = TOP(); - assert(PyExceptionInstance_Check(left)); - if (check_except_type_valid(tstate, right) < 0) { - Py_DECREF(right); - goto error; - } - - int res = PyErr_GivenExceptionMatches(left, right); - Py_DECREF(right); - PUSH(Py_NewRef(res ? Py_True : Py_False)); - DISPATCH(); - } - - TARGET(IMPORT_NAME) { - PyObject *name = GETITEM(names, oparg); - PyObject *fromlist = POP(); - PyObject *level = TOP(); - PyObject *res; - res = import_name(tstate, frame, name, fromlist, level); - Py_DECREF(level); - Py_DECREF(fromlist); - SET_TOP(res); - if (res == NULL) - goto error; - DISPATCH(); - } - - TARGET(IMPORT_STAR) { - PyObject *from = POP(), *locals; - int err; - if (_PyFrame_FastToLocalsWithError(frame) < 0) { - Py_DECREF(from); - goto error; - } - - locals = LOCALS(); - if (locals == NULL) { - _PyErr_SetString(tstate, PyExc_SystemError, - "no locals found during 'import *'"); - Py_DECREF(from); - goto error; - } - err = import_all_from(tstate, locals, from); - _PyFrame_LocalsToFast(frame, 0); - Py_DECREF(from); - if (err != 0) - goto error; - DISPATCH(); - } - - TARGET(IMPORT_FROM) { - PyObject *name = GETITEM(names, oparg); - PyObject *from = TOP(); - PyObject *res; - res = import_from(tstate, from, name); - PUSH(res); - if (res == NULL) - goto error; - DISPATCH(); - } - - TARGET(JUMP_FORWARD) { - JUMPBY(oparg); - DISPATCH(); - } - - TARGET(JUMP_BACKWARD) { - _PyCode_Warmup(frame->f_code); - GO_TO_INSTRUCTION(JUMP_BACKWARD_QUICK); - } - - TARGET(POP_JUMP_IF_FALSE) { - PREDICTED(POP_JUMP_IF_FALSE); - PyObject *cond = POP(); - if (Py_IsTrue(cond)) { - _Py_DECREF_NO_DEALLOC(cond); - } - else if (Py_IsFalse(cond)) { - _Py_DECREF_NO_DEALLOC(cond); - JUMPBY(oparg); - } - else { - int err = PyObject_IsTrue(cond); - Py_DECREF(cond); - if (err > 0) - ; - else if (err == 0) { - JUMPBY(oparg); - } - else - goto error; - } - DISPATCH(); - } - - TARGET(POP_JUMP_IF_TRUE) { - PyObject *cond = POP(); - if (Py_IsFalse(cond)) { - _Py_DECREF_NO_DEALLOC(cond); - } - else if (Py_IsTrue(cond)) { - _Py_DECREF_NO_DEALLOC(cond); - JUMPBY(oparg); - } - else { - int err = PyObject_IsTrue(cond); - Py_DECREF(cond); - if (err > 0) { - JUMPBY(oparg); - } - else if (err == 0) - ; - else - goto error; - } - DISPATCH(); - } - - TARGET(POP_JUMP_IF_NOT_NONE) { - PyObject 
*value = POP(); - if (!Py_IsNone(value)) { - JUMPBY(oparg); - } - Py_DECREF(value); - DISPATCH(); - } - - TARGET(POP_JUMP_IF_NONE) { - PyObject *value = POP(); - if (Py_IsNone(value)) { - _Py_DECREF_NO_DEALLOC(value); - JUMPBY(oparg); - } - else { - Py_DECREF(value); - } - DISPATCH(); - } - - TARGET(JUMP_IF_FALSE_OR_POP) { - PyObject *cond = TOP(); - int err; - if (Py_IsTrue(cond)) { - STACK_SHRINK(1); - _Py_DECREF_NO_DEALLOC(cond); - DISPATCH(); - } - if (Py_IsFalse(cond)) { - JUMPBY(oparg); - DISPATCH(); - } - err = PyObject_IsTrue(cond); - if (err > 0) { - STACK_SHRINK(1); - Py_DECREF(cond); - } - else if (err == 0) - JUMPBY(oparg); - else - goto error; - DISPATCH(); - } - - TARGET(JUMP_IF_TRUE_OR_POP) { - PyObject *cond = TOP(); - int err; - if (Py_IsFalse(cond)) { - STACK_SHRINK(1); - _Py_DECREF_NO_DEALLOC(cond); - DISPATCH(); - } - if (Py_IsTrue(cond)) { - JUMPBY(oparg); - DISPATCH(); - } - err = PyObject_IsTrue(cond); - if (err > 0) { - JUMPBY(oparg); - } - else if (err == 0) { - STACK_SHRINK(1); - Py_DECREF(cond); - } - else - goto error; - DISPATCH(); - } - - TARGET(JUMP_BACKWARD_NO_INTERRUPT) { - /* This bytecode is used in the `yield from` or `await` loop. - * If there is an interrupt, we want it handled in the innermost - * generator or coroutine, so we deliberately do not check it here. - * (see bpo-30039). - */ - JUMPBY(-oparg); - DISPATCH(); - } - - TARGET(JUMP_BACKWARD_QUICK) { - PREDICTED(JUMP_BACKWARD_QUICK); - assert(oparg < INSTR_OFFSET()); - JUMPBY(-oparg); - CHECK_EVAL_BREAKER(); - DISPATCH(); - } - - TARGET(GET_LEN) { - // PUSH(len(TOS)) - Py_ssize_t len_i = PyObject_Length(TOP()); - if (len_i < 0) { - goto error; - } - PyObject *len_o = PyLong_FromSsize_t(len_i); - if (len_o == NULL) { - goto error; - } - PUSH(len_o); - DISPATCH(); - } - - TARGET(MATCH_CLASS) { - // Pop TOS and TOS1. Set TOS to a tuple of attributes on success, or - // None on failure. - PyObject *names = POP(); - PyObject *type = POP(); - PyObject *subject = TOP(); - assert(PyTuple_CheckExact(names)); - PyObject *attrs = match_class(tstate, subject, type, oparg, names); - Py_DECREF(names); - Py_DECREF(type); - if (attrs) { - // Success! - assert(PyTuple_CheckExact(attrs)); - SET_TOP(attrs); - } - else if (_PyErr_Occurred(tstate)) { - // Error! - goto error; - } - else { - // Failure! - Py_INCREF(Py_None); - SET_TOP(Py_None); - } - Py_DECREF(subject); - DISPATCH(); - } - - TARGET(MATCH_MAPPING) { - PyObject *subject = TOP(); - int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_MAPPING; - PyObject *res = match ? Py_True : Py_False; - Py_INCREF(res); - PUSH(res); - PREDICT(POP_JUMP_IF_FALSE); - DISPATCH(); - } - - TARGET(MATCH_SEQUENCE) { - PyObject *subject = TOP(); - int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_SEQUENCE; - PyObject *res = match ? Py_True : Py_False; - Py_INCREF(res); - PUSH(res); - PREDICT(POP_JUMP_IF_FALSE); - DISPATCH(); - } - - TARGET(MATCH_KEYS) { - // On successful match, PUSH(values). Otherwise, PUSH(None). 
- PyObject *keys = TOP(); - PyObject *subject = SECOND(); - PyObject *values_or_none = match_keys(tstate, subject, keys); - if (values_or_none == NULL) { - goto error; - } - PUSH(values_or_none); - DISPATCH(); - } - - TARGET(GET_ITER) { - /* before: [obj]; after [getiter(obj)] */ - PyObject *iterable = TOP(); - PyObject *iter = PyObject_GetIter(iterable); - Py_DECREF(iterable); - SET_TOP(iter); - if (iter == NULL) - goto error; - DISPATCH(); - } - - TARGET(GET_YIELD_FROM_ITER) { - /* before: [obj]; after [getiter(obj)] */ - PyObject *iterable = TOP(); - PyObject *iter; - if (PyCoro_CheckExact(iterable)) { - /* `iterable` is a coroutine */ - if (!(frame->f_code->co_flags & (CO_COROUTINE | CO_ITERABLE_COROUTINE))) { - /* and it is used in a 'yield from' expression of a - regular generator. */ - Py_DECREF(iterable); - SET_TOP(NULL); - _PyErr_SetString(tstate, PyExc_TypeError, - "cannot 'yield from' a coroutine object " - "in a non-coroutine generator"); - goto error; - } - } - else if (!PyGen_CheckExact(iterable)) { - /* `iterable` is not a generator. */ - iter = PyObject_GetIter(iterable); - Py_DECREF(iterable); - SET_TOP(iter); - if (iter == NULL) - goto error; - } - PREDICT(LOAD_CONST); - DISPATCH(); - } - - TARGET(FOR_ITER) { - PREDICTED(FOR_ITER); - /* before: [iter]; after: [iter, iter()] *or* [] */ - PyObject *iter = TOP(); - PyObject *next = (*Py_TYPE(iter)->tp_iternext)(iter); - if (next != NULL) { - PUSH(next); - JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER); - DISPATCH(); - } - if (_PyErr_Occurred(tstate)) { - if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { - goto error; - } - else if (tstate->c_tracefunc != NULL) { - call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj, tstate, frame); - } - _PyErr_Clear(tstate); - } - /* iterator ended normally */ - assert(_Py_OPCODE(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg]) == END_FOR); - STACK_SHRINK(1); - Py_DECREF(iter); - /* Skip END_FOR */ - JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1); - DISPATCH(); - } - - TARGET(FOR_ITER_ADAPTIVE) { - assert(cframe.use_tracing == 0); - _PyForIterCache *cache = (_PyForIterCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache)) { - next_instr--; - _Py_Specialize_ForIter(TOP(), next_instr); - DISPATCH_SAME_OPARG(); - } - else { - STAT_INC(FOR_ITER, deferred); - DECREMENT_ADAPTIVE_COUNTER(cache); - GO_TO_INSTRUCTION(FOR_ITER); - } - } - - TARGET(FOR_ITER_LIST) { - assert(cframe.use_tracing == 0); - _PyListIterObject *it = (_PyListIterObject *)TOP(); - DEOPT_IF(Py_TYPE(it) != &PyListIter_Type, FOR_ITER); - STAT_INC(FOR_ITER, hit); - PyListObject *seq = it->it_seq; - if (seq) { - if (it->it_index < PyList_GET_SIZE(seq)) { - PyObject *next = PyList_GET_ITEM(seq, it->it_index++); - Py_INCREF(next); - PUSH(next); - JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER); - DISPATCH(); - } - it->it_seq = NULL; - Py_DECREF(seq); - } - STACK_SHRINK(1); - Py_DECREF(it); - JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1); - DISPATCH(); - } - - TARGET(FOR_ITER_RANGE) { - assert(cframe.use_tracing == 0); - _PyRangeIterObject *r = (_PyRangeIterObject *)TOP(); - DEOPT_IF(Py_TYPE(r) != &PyRangeIter_Type, FOR_ITER); - STAT_INC(FOR_ITER, hit); - _Py_CODEUNIT next = next_instr[INLINE_CACHE_ENTRIES_FOR_ITER]; - assert(_PyOpcode_Deopt[_Py_OPCODE(next)] == STORE_FAST); - if (r->index >= r->len) { - STACK_SHRINK(1); - Py_DECREF(r); - JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1); - DISPATCH(); - } - long value = (long)(r->start + - (unsigned long)(r->index++) * r->step); - if 
(_PyLong_AssignValue(&GETLOCAL(_Py_OPARG(next)), value) < 0) { - goto error; - } - // The STORE_FAST is already done. - JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + 1); - DISPATCH(); - } - - TARGET(BEFORE_ASYNC_WITH) { - PyObject *mgr = TOP(); - PyObject *res; - PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__aenter__)); - if (enter == NULL) { - if (!_PyErr_Occurred(tstate)) { - _PyErr_Format(tstate, PyExc_TypeError, - "'%.200s' object does not support the " - "asynchronous context manager protocol", - Py_TYPE(mgr)->tp_name); - } - goto error; - } - PyObject *exit = _PyObject_LookupSpecial(mgr, &_Py_ID(__aexit__)); - if (exit == NULL) { - if (!_PyErr_Occurred(tstate)) { - _PyErr_Format(tstate, PyExc_TypeError, - "'%.200s' object does not support the " - "asynchronous context manager protocol " - "(missed __aexit__ method)", - Py_TYPE(mgr)->tp_name); - } - Py_DECREF(enter); - goto error; - } - SET_TOP(exit); - Py_DECREF(mgr); - res = _PyObject_CallNoArgs(enter); - Py_DECREF(enter); - if (res == NULL) - goto error; - PUSH(res); - PREDICT(GET_AWAITABLE); - DISPATCH(); - } - - TARGET(BEFORE_WITH) { - PyObject *mgr = TOP(); - PyObject *res; - PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__enter__)); - if (enter == NULL) { - if (!_PyErr_Occurred(tstate)) { - _PyErr_Format(tstate, PyExc_TypeError, - "'%.200s' object does not support the " - "context manager protocol", - Py_TYPE(mgr)->tp_name); - } - goto error; - } - PyObject *exit = _PyObject_LookupSpecial(mgr, &_Py_ID(__exit__)); - if (exit == NULL) { - if (!_PyErr_Occurred(tstate)) { - _PyErr_Format(tstate, PyExc_TypeError, - "'%.200s' object does not support the " - "context manager protocol " - "(missed __exit__ method)", - Py_TYPE(mgr)->tp_name); - } - Py_DECREF(enter); - goto error; - } - SET_TOP(exit); - Py_DECREF(mgr); - res = _PyObject_CallNoArgs(enter); - Py_DECREF(enter); - if (res == NULL) { - goto error; - } - PUSH(res); - DISPATCH(); - } - - TARGET(WITH_EXCEPT_START) { - /* At the top of the stack are 4 values: - - TOP = exc_info() - - SECOND = previous exception - - THIRD: lasti of exception in exc_info() - - FOURTH: the context.__exit__ bound method - We call FOURTH(type(TOP), TOP, GetTraceback(TOP)). - Then we push the __exit__ return value. 
- */ - PyObject *exit_func; - PyObject *exc, *val, *tb, *res; - - val = TOP(); - assert(val && PyExceptionInstance_Check(val)); - exc = PyExceptionInstance_Class(val); - tb = PyException_GetTraceback(val); - Py_XDECREF(tb); - assert(PyLong_Check(PEEK(3))); - exit_func = PEEK(4); - PyObject *stack[4] = {NULL, exc, val, tb}; - res = PyObject_Vectorcall(exit_func, stack + 1, - 3 | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL); - if (res == NULL) - goto error; - - PUSH(res); - DISPATCH(); - } - - TARGET(PUSH_EXC_INFO) { - PyObject *value = TOP(); - - _PyErr_StackItem *exc_info = tstate->exc_info; - if (exc_info->exc_value != NULL) { - SET_TOP(exc_info->exc_value); - } - else { - Py_INCREF(Py_None); - SET_TOP(Py_None); - } - - Py_INCREF(value); - PUSH(value); - assert(PyExceptionInstance_Check(value)); - exc_info->exc_value = value; - - DISPATCH(); - } - - TARGET(LOAD_ATTR_METHOD_WITH_VALUES) { - /* Cached method object */ - assert(cframe.use_tracing == 0); - PyObject *self = TOP(); - PyTypeObject *self_cls = Py_TYPE(self); - _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; - uint32_t type_version = read_u32(cache->type_version); - assert(type_version != 0); - DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); - assert(self_cls->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(self); - DEOPT_IF(!_PyDictOrValues_IsValues(dorv), LOAD_ATTR); - PyHeapTypeObject *self_heap_type = (PyHeapTypeObject *)self_cls; - DEOPT_IF(self_heap_type->ht_cached_keys->dk_version != - read_u32(cache->keys_version), LOAD_ATTR); - STAT_INC(LOAD_ATTR, hit); - PyObject *res = read_obj(cache->descr); - assert(res != NULL); - assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); - Py_INCREF(res); - SET_TOP(res); - PUSH(self); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - - TARGET(LOAD_ATTR_METHOD_WITH_DICT) { - /* Can be either a managed dict, or a tp_dictoffset offset.*/ - assert(cframe.use_tracing == 0); - PyObject *self = TOP(); - PyTypeObject *self_cls = Py_TYPE(self); - _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; - - DEOPT_IF(self_cls->tp_version_tag != read_u32(cache->type_version), - LOAD_ATTR); - /* Treat index as a signed 16 bit value */ - Py_ssize_t dictoffset = self_cls->tp_dictoffset; - assert(dictoffset > 0); - PyDictObject **dictptr = (PyDictObject**)(((char *)self)+dictoffset); - PyDictObject *dict = *dictptr; - DEOPT_IF(dict == NULL, LOAD_ATTR); - DEOPT_IF(dict->ma_keys->dk_version != read_u32(cache->keys_version), - LOAD_ATTR); - STAT_INC(LOAD_ATTR, hit); - PyObject *res = read_obj(cache->descr); - assert(res != NULL); - assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); - Py_INCREF(res); - SET_TOP(res); - PUSH(self); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - - TARGET(LOAD_ATTR_METHOD_NO_DICT) { - assert(cframe.use_tracing == 0); - PyObject *self = TOP(); - PyTypeObject *self_cls = Py_TYPE(self); - _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; - uint32_t type_version = read_u32(cache->type_version); - DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); - assert(self_cls->tp_dictoffset == 0); - STAT_INC(LOAD_ATTR, hit); - PyObject *res = read_obj(cache->descr); - assert(res != NULL); - assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); - Py_INCREF(res); - SET_TOP(res); - PUSH(self); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - - TARGET(LOAD_ATTR_METHOD_LAZY_DICT) { - assert(cframe.use_tracing 
== 0); - PyObject *self = TOP(); - PyTypeObject *self_cls = Py_TYPE(self); - _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; - uint32_t type_version = read_u32(cache->type_version); - DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); - Py_ssize_t dictoffset = self_cls->tp_dictoffset; - assert(dictoffset > 0); - PyObject *dict = *(PyObject **)((char *)self + dictoffset); - /* This object has a __dict__, just not yet created */ - DEOPT_IF(dict != NULL, LOAD_ATTR); - STAT_INC(LOAD_ATTR, hit); - PyObject *res = read_obj(cache->descr); - assert(res != NULL); - assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); - Py_INCREF(res); - SET_TOP(res); - PUSH(self); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - - TARGET(CALL_BOUND_METHOD_EXACT_ARGS) { - DEOPT_IF(is_method(stack_pointer, oparg), CALL); - PyObject *function = PEEK(oparg + 1); - DEOPT_IF(Py_TYPE(function) != &PyMethod_Type, CALL); - STAT_INC(CALL, hit); - PyObject *meth = ((PyMethodObject *)function)->im_func; - PyObject *self = ((PyMethodObject *)function)->im_self; - Py_INCREF(meth); - Py_INCREF(self); - PEEK(oparg + 1) = self; - PEEK(oparg + 2) = meth; - Py_DECREF(function); - GO_TO_INSTRUCTION(CALL_PY_EXACT_ARGS); - } - - TARGET(KW_NAMES) { - assert(call_shape.kwnames == NULL); - assert(oparg < PyTuple_GET_SIZE(consts)); - call_shape.kwnames = GETITEM(consts, oparg); - DISPATCH(); - } - - TARGET(CALL) { - PREDICTED(CALL); - int total_args, is_meth; - is_meth = is_method(stack_pointer, oparg); - PyObject *function = PEEK(oparg + 1); - if (!is_meth && Py_TYPE(function) == &PyMethod_Type) { - PyObject *meth = ((PyMethodObject *)function)->im_func; - PyObject *self = ((PyMethodObject *)function)->im_self; - Py_INCREF(meth); - Py_INCREF(self); - PEEK(oparg+1) = self; - PEEK(oparg+2) = meth; - Py_DECREF(function); - is_meth = 1; - } - total_args = oparg + is_meth; - function = PEEK(total_args + 1); - int positional_args = total_args - KWNAMES_LEN(); - // Check if the call can be inlined or not - if (Py_TYPE(function) == &PyFunction_Type && - tstate->interp->eval_frame == NULL && - ((PyFunctionObject *)function)->vectorcall == _PyFunction_Vectorcall) - { - int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(function))->co_flags; - PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(function)); - STACK_SHRINK(total_args); - _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit( - tstate, (PyFunctionObject *)function, locals, - stack_pointer, positional_args, call_shape.kwnames - ); - call_shape.kwnames = NULL; - STACK_SHRINK(2-is_meth); - // The frame has stolen all the arguments from the stack, - // so there is no need to clean them up. 
- if (new_frame == NULL) { - goto error; - } - _PyFrame_SetStackPointer(frame, stack_pointer); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - frame->prev_instr = next_instr - 1; - new_frame->previous = frame; - cframe.current_frame = frame = new_frame; - CALL_STAT_INC(inlined_py_calls); - goto start_frame; - } - /* Callable is not a normal Python function */ - PyObject *res; - if (cframe.use_tracing) { - res = trace_call_function( - tstate, function, stack_pointer-total_args, - positional_args, call_shape.kwnames); - } - else { - res = PyObject_Vectorcall( - function, stack_pointer-total_args, - positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET, - call_shape.kwnames); - } - call_shape.kwnames = NULL; - assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - Py_DECREF(function); - /* Clear the stack */ - STACK_SHRINK(total_args); - for (int i = 0; i < total_args; i++) { - Py_DECREF(stack_pointer[i]); - } - STACK_SHRINK(2-is_meth); - PUSH(res); - if (res == NULL) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - CHECK_EVAL_BREAKER(); - DISPATCH(); - } - - TARGET(CALL_ADAPTIVE) { - _PyCallCache *cache = (_PyCallCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache)) { - next_instr--; - int is_meth = is_method(stack_pointer, oparg); - int nargs = oparg + is_meth; - PyObject *callable = PEEK(nargs + 1); - int err = _Py_Specialize_Call(callable, next_instr, nargs, - call_shape.kwnames); - if (err < 0) { - goto error; - } - DISPATCH_SAME_OPARG(); - } - else { - STAT_INC(CALL, deferred); - DECREMENT_ADAPTIVE_COUNTER(cache); - GO_TO_INSTRUCTION(CALL); - } - } - - TARGET(CALL_PY_EXACT_ARGS) { - PREDICTED(CALL_PY_EXACT_ARGS); - assert(call_shape.kwnames == NULL); - DEOPT_IF(tstate->interp->eval_frame, CALL); - _PyCallCache *cache = (_PyCallCache *)next_instr; - int is_meth = is_method(stack_pointer, oparg); - int argcount = oparg + is_meth; - PyObject *callable = PEEK(argcount + 1); - DEOPT_IF(!PyFunction_Check(callable), CALL); - PyFunctionObject *func = (PyFunctionObject *)callable; - DEOPT_IF(func->func_version != read_u32(cache->func_version), CALL); - PyCodeObject *code = (PyCodeObject *)func->func_code; - DEOPT_IF(code->co_argcount != argcount, CALL); - DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), CALL); - STAT_INC(CALL, hit); - _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, func); - CALL_STAT_INC(inlined_py_calls); - STACK_SHRINK(argcount); - for (int i = 0; i < argcount; i++) { - new_frame->localsplus[i] = stack_pointer[i]; - } - for (int i = argcount; i < code->co_nlocalsplus; i++) { - new_frame->localsplus[i] = NULL; - } - STACK_SHRINK(2-is_meth); - _PyFrame_SetStackPointer(frame, stack_pointer); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - frame->prev_instr = next_instr - 1; - new_frame->previous = frame; - frame = cframe.current_frame = new_frame; - goto start_frame; - } - - TARGET(CALL_PY_WITH_DEFAULTS) { - assert(call_shape.kwnames == NULL); - DEOPT_IF(tstate->interp->eval_frame, CALL); - _PyCallCache *cache = (_PyCallCache *)next_instr; - int is_meth = is_method(stack_pointer, oparg); - int argcount = oparg + is_meth; - PyObject *callable = PEEK(argcount + 1); - DEOPT_IF(!PyFunction_Check(callable), CALL); - PyFunctionObject *func = (PyFunctionObject *)callable; - DEOPT_IF(func->func_version != read_u32(cache->func_version), CALL); - PyCodeObject *code = (PyCodeObject *)func->func_code; - DEOPT_IF(argcount > code->co_argcount, CALL); - int minargs = cache->min_args; - DEOPT_IF(argcount < minargs, CALL); - 
DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), CALL); - STAT_INC(CALL, hit); - _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, func); - CALL_STAT_INC(inlined_py_calls); - STACK_SHRINK(argcount); - for (int i = 0; i < argcount; i++) { - new_frame->localsplus[i] = stack_pointer[i]; - } - for (int i = argcount; i < code->co_argcount; i++) { - PyObject *def = PyTuple_GET_ITEM(func->func_defaults, - i - minargs); - Py_INCREF(def); - new_frame->localsplus[i] = def; - } - for (int i = code->co_argcount; i < code->co_nlocalsplus; i++) { - new_frame->localsplus[i] = NULL; - } - STACK_SHRINK(2-is_meth); - _PyFrame_SetStackPointer(frame, stack_pointer); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - frame->prev_instr = next_instr - 1; - new_frame->previous = frame; - frame = cframe.current_frame = new_frame; - goto start_frame; - } - - TARGET(CALL_NO_KW_TYPE_1) { - assert(call_shape.kwnames == NULL); - assert(cframe.use_tracing == 0); - assert(oparg == 1); - DEOPT_IF(is_method(stack_pointer, 1), CALL); - PyObject *obj = TOP(); - PyObject *callable = SECOND(); - DEOPT_IF(callable != (PyObject *)&PyType_Type, CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - PyObject *res = Py_NewRef(Py_TYPE(obj)); - Py_DECREF(callable); - Py_DECREF(obj); - STACK_SHRINK(2); - SET_TOP(res); - DISPATCH(); - } - - TARGET(CALL_NO_KW_STR_1) { - assert(call_shape.kwnames == NULL); - assert(cframe.use_tracing == 0); - assert(oparg == 1); - DEOPT_IF(is_method(stack_pointer, 1), CALL); - PyObject *callable = PEEK(2); - DEOPT_IF(callable != (PyObject *)&PyUnicode_Type, CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - PyObject *arg = TOP(); - PyObject *res = PyObject_Str(arg); - Py_DECREF(arg); - Py_DECREF(&PyUnicode_Type); - STACK_SHRINK(2); - SET_TOP(res); - if (res == NULL) { - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); - } - - TARGET(CALL_NO_KW_TUPLE_1) { - assert(call_shape.kwnames == NULL); - assert(oparg == 1); - DEOPT_IF(is_method(stack_pointer, 1), CALL); - PyObject *callable = PEEK(2); - DEOPT_IF(callable != (PyObject *)&PyTuple_Type, CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - PyObject *arg = TOP(); - PyObject *res = PySequence_Tuple(arg); - Py_DECREF(arg); - Py_DECREF(&PyTuple_Type); - STACK_SHRINK(2); - SET_TOP(res); - if (res == NULL) { - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); - } - - TARGET(CALL_BUILTIN_CLASS) { - int is_meth = is_method(stack_pointer, oparg); - int total_args = oparg + is_meth; - int kwnames_len = KWNAMES_LEN(); - PyObject *callable = PEEK(total_args + 1); - DEOPT_IF(!PyType_Check(callable), CALL); - PyTypeObject *tp = (PyTypeObject *)callable; - DEOPT_IF(tp->tp_vectorcall == NULL, CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - STACK_SHRINK(total_args); - PyObject *res = tp->tp_vectorcall((PyObject *)tp, stack_pointer, - total_args-kwnames_len, call_shape.kwnames); - call_shape.kwnames = NULL; - /* Free the arguments. 
*/ - for (int i = 0; i < total_args; i++) { - Py_DECREF(stack_pointer[i]); - } - Py_DECREF(tp); - STACK_SHRINK(1-is_meth); - SET_TOP(res); - if (res == NULL) { - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); - } - - TARGET(CALL_NO_KW_BUILTIN_O) { - assert(cframe.use_tracing == 0); - /* Builtin METH_O functions */ - assert(call_shape.kwnames == NULL); - int is_meth = is_method(stack_pointer, oparg); - int total_args = oparg + is_meth; - DEOPT_IF(total_args != 1, CALL); - PyObject *callable = PEEK(total_args + 1); - DEOPT_IF(!PyCFunction_CheckExact(callable), CALL); - DEOPT_IF(PyCFunction_GET_FLAGS(callable) != METH_O, CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable); - // This is slower but CPython promises to check all non-vectorcall - // function calls. - if (_Py_EnterRecursiveCallTstate(tstate, " while calling a Python object")) { - goto error; - } - PyObject *arg = TOP(); - PyObject *res = cfunc(PyCFunction_GET_SELF(callable), arg); - _Py_LeaveRecursiveCallTstate(tstate); - assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - - Py_DECREF(arg); - Py_DECREF(callable); - STACK_SHRINK(2-is_meth); - SET_TOP(res); - if (res == NULL) { - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); - } - - TARGET(CALL_NO_KW_BUILTIN_FAST) { - assert(cframe.use_tracing == 0); - /* Builtin METH_FASTCALL functions, without keywords */ - assert(call_shape.kwnames == NULL); - int is_meth = is_method(stack_pointer, oparg); - int total_args = oparg + is_meth; - PyObject *callable = PEEK(total_args + 1); - DEOPT_IF(!PyCFunction_CheckExact(callable), CALL); - DEOPT_IF(PyCFunction_GET_FLAGS(callable) != METH_FASTCALL, - CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable); - STACK_SHRINK(total_args); - /* res = func(self, args, nargs) */ - PyObject *res = ((_PyCFunctionFast)(void(*)(void))cfunc)( - PyCFunction_GET_SELF(callable), - stack_pointer, - total_args); - assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - - /* Free the arguments. */ - for (int i = 0; i < total_args; i++) { - Py_DECREF(stack_pointer[i]); - } - STACK_SHRINK(2-is_meth); - PUSH(res); - Py_DECREF(callable); - if (res == NULL) { - /* Not deopting because this doesn't mean our optimization was - wrong. `res` can be NULL for valid reasons. Eg. getattr(x, - 'invalid'). In those cases an exception is set, so we must - handle it. 
- */ - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); + return -1; + } + } + if (tstate->c_profilefunc) { + if (call_trace_protected(tstate->c_profilefunc, tstate->c_profileobj, + tstate, frame, PyTrace_RETURN, retval)) { + return -1; } + } + return 0; +} - TARGET(CALL_BUILTIN_FAST_WITH_KEYWORDS) { - assert(cframe.use_tracing == 0); - /* Builtin METH_FASTCALL | METH_KEYWORDS functions */ - int is_meth = is_method(stack_pointer, oparg); - int total_args = oparg + is_meth; - PyObject *callable = PEEK(total_args + 1); - DEOPT_IF(!PyCFunction_CheckExact(callable), CALL); - DEOPT_IF(PyCFunction_GET_FLAGS(callable) != - (METH_FASTCALL | METH_KEYWORDS), CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - STACK_SHRINK(total_args); - /* res = func(self, args, nargs, kwnames) */ - _PyCFunctionFastWithKeywords cfunc = - (_PyCFunctionFastWithKeywords)(void(*)(void)) - PyCFunction_GET_FUNCTION(callable); - PyObject *res = cfunc( - PyCFunction_GET_SELF(callable), - stack_pointer, - total_args - KWNAMES_LEN(), - call_shape.kwnames - ); - assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - call_shape.kwnames = NULL; - /* Free the arguments. */ - for (int i = 0; i < total_args; i++) { - Py_DECREF(stack_pointer[i]); - } - STACK_SHRINK(2-is_meth); - PUSH(res); - Py_DECREF(callable); - if (res == NULL) { - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); +int _Py_CheckRecursiveCallPy( + PyThreadState *tstate) +{ + if (tstate->recursion_headroom) { + if (tstate->py_recursion_remaining < -50) { + /* Overflowing while handling an overflow. Give up. */ + Py_FatalError("Cannot recover from Python stack overflow."); } - - TARGET(CALL_NO_KW_LEN) { - assert(cframe.use_tracing == 0); - assert(call_shape.kwnames == NULL); - /* len(o) */ - int is_meth = is_method(stack_pointer, oparg); - int total_args = oparg + is_meth; - DEOPT_IF(total_args != 1, CALL); - PyObject *callable = PEEK(total_args + 1); - PyInterpreterState *interp = _PyInterpreterState_GET(); - DEOPT_IF(callable != interp->callable_cache.len, CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - PyObject *arg = TOP(); - Py_ssize_t len_i = PyObject_Length(arg); - if (len_i < 0) { - goto error; - } - PyObject *res = PyLong_FromSsize_t(len_i); - assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - - STACK_SHRINK(2-is_meth); - SET_TOP(res); - Py_DECREF(callable); - Py_DECREF(arg); - if (res == NULL) { - goto error; - } - DISPATCH(); + } + else { + if (tstate->py_recursion_remaining <= 0) { + tstate->recursion_headroom++; + _PyErr_Format(tstate, PyExc_RecursionError, + "maximum recursion depth exceeded"); + tstate->recursion_headroom--; + return -1; } + } + return 0; +} - TARGET(CALL_NO_KW_ISINSTANCE) { - assert(cframe.use_tracing == 0); - assert(call_shape.kwnames == NULL); - /* isinstance(o, o2) */ - int is_meth = is_method(stack_pointer, oparg); - int total_args = oparg + is_meth; - PyObject *callable = PEEK(total_args + 1); - DEOPT_IF(total_args != 2, CALL); - PyInterpreterState *interp = _PyInterpreterState_GET(); - DEOPT_IF(callable != interp->callable_cache.isinstance, CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - PyObject *cls = POP(); - PyObject *inst = TOP(); - int retval = PyObject_IsInstance(inst, cls); - if (retval < 0) { - Py_DECREF(cls); - goto error; - } - PyObject *res = PyBool_FromLong(retval); - assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - - STACK_SHRINK(2-is_meth); - SET_TOP(res); - Py_DECREF(inst); - Py_DECREF(cls); - Py_DECREF(callable); - 
if (res == NULL) { - goto error; - } - DISPATCH(); - } +static inline int _Py_EnterRecursivePy(PyThreadState *tstate) { + return (tstate->py_recursion_remaining-- <= 0) && + _Py_CheckRecursiveCallPy(tstate); +} - TARGET(CALL_NO_KW_LIST_APPEND) { - assert(cframe.use_tracing == 0); - assert(call_shape.kwnames == NULL); - assert(oparg == 1); - PyObject *callable = PEEK(3); - PyInterpreterState *interp = _PyInterpreterState_GET(); - DEOPT_IF(callable != interp->callable_cache.list_append, CALL); - PyObject *list = SECOND(); - DEOPT_IF(!PyList_Check(list), CALL); - STAT_INC(CALL, hit); - // CALL + POP_TOP - JUMPBY(INLINE_CACHE_ENTRIES_CALL + 1); - assert(_Py_OPCODE(next_instr[-1]) == POP_TOP); - PyObject *arg = POP(); - if (_PyList_AppendTakeRef((PyListObject *)list, arg) < 0) { - goto error; - } - STACK_SHRINK(2); - Py_DECREF(list); - Py_DECREF(callable); - DISPATCH(); - } - TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_O) { - assert(call_shape.kwnames == NULL); - int is_meth = is_method(stack_pointer, oparg); - int total_args = oparg + is_meth; - PyMethodDescrObject *callable = - (PyMethodDescrObject *)PEEK(total_args + 1); - DEOPT_IF(total_args != 2, CALL); - DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); - PyMethodDef *meth = callable->d_method; - DEOPT_IF(meth->ml_flags != METH_O, CALL); - PyObject *arg = TOP(); - PyObject *self = SECOND(); - DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - PyCFunction cfunc = meth->ml_meth; - // This is slower but CPython promises to check all non-vectorcall - // function calls. - if (_Py_EnterRecursiveCallTstate(tstate, " while calling a Python object")) { - goto error; - } - PyObject *res = cfunc(self, arg); - _Py_LeaveRecursiveCallTstate(tstate); - assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - Py_DECREF(self); - Py_DECREF(arg); - STACK_SHRINK(oparg + 1); - SET_TOP(res); - Py_DECREF(callable); - if (res == NULL) { - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); - } +static inline void _Py_LeaveRecursiveCallPy(PyThreadState *tstate) { + tstate->py_recursion_remaining++; +} - TARGET(CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS) { - int is_meth = is_method(stack_pointer, oparg); - int total_args = oparg + is_meth; - PyMethodDescrObject *callable = - (PyMethodDescrObject *)PEEK(total_args + 1); - DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); - PyMethodDef *meth = callable->d_method; - DEOPT_IF(meth->ml_flags != (METH_FASTCALL|METH_KEYWORDS), CALL); - PyTypeObject *d_type = callable->d_common.d_type; - PyObject *self = PEEK(total_args); - DEOPT_IF(!Py_IS_TYPE(self, d_type), CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - int nargs = total_args-1; - STACK_SHRINK(nargs); - _PyCFunctionFastWithKeywords cfunc = - (_PyCFunctionFastWithKeywords)(void(*)(void))meth->ml_meth; - PyObject *res = cfunc(self, stack_pointer, nargs - KWNAMES_LEN(), - call_shape.kwnames); - assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - call_shape.kwnames = NULL; - - /* Free the arguments. 
*/ - for (int i = 0; i < nargs; i++) { - Py_DECREF(stack_pointer[i]); - } - Py_DECREF(self); - STACK_SHRINK(2-is_meth); - SET_TOP(res); - Py_DECREF(callable); - if (res == NULL) { - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); - } - TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS) { - assert(call_shape.kwnames == NULL); - assert(oparg == 0 || oparg == 1); - int is_meth = is_method(stack_pointer, oparg); - int total_args = oparg + is_meth; - DEOPT_IF(total_args != 1, CALL); - PyMethodDescrObject *callable = (PyMethodDescrObject *)SECOND(); - DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); - PyMethodDef *meth = callable->d_method; - PyObject *self = TOP(); - DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); - DEOPT_IF(meth->ml_flags != METH_NOARGS, CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - PyCFunction cfunc = meth->ml_meth; - // This is slower but CPython promises to check all non-vectorcall - // function calls. - if (_Py_EnterRecursiveCallTstate(tstate, " while calling a Python object")) { - goto error; - } - PyObject *res = cfunc(self, NULL); - _Py_LeaveRecursiveCallTstate(tstate); - assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - Py_DECREF(self); - STACK_SHRINK(oparg + 1); - SET_TOP(res); - Py_DECREF(callable); - if (res == NULL) { - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); - } +// GH-89279: Must be a macro to be sure it's inlined by MSVC. +#define is_method(stack_pointer, args) (PEEK((args)+2) != NULL) - TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_FAST) { - assert(call_shape.kwnames == NULL); - int is_meth = is_method(stack_pointer, oparg); - int total_args = oparg + is_meth; - PyMethodDescrObject *callable = - (PyMethodDescrObject *)PEEK(total_args + 1); - /* Builtin METH_FASTCALL methods, without keywords */ - DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); - PyMethodDef *meth = callable->d_method; - DEOPT_IF(meth->ml_flags != METH_FASTCALL, CALL); - PyObject *self = PEEK(total_args); - DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - _PyCFunctionFast cfunc = - (_PyCFunctionFast)(void(*)(void))meth->ml_meth; - int nargs = total_args-1; - STACK_SHRINK(nargs); - PyObject *res = cfunc(self, stack_pointer, nargs); - assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - /* Clear the stack of the arguments. */ - for (int i = 0; i < nargs; i++) { - Py_DECREF(stack_pointer[i]); - } - Py_DECREF(self); - STACK_SHRINK(2-is_meth); - SET_TOP(res); - Py_DECREF(callable); - if (res == NULL) { - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); - } +#define KWNAMES_LEN() \ + (kwnames == NULL ? 0 : ((int)PyTuple_GET_SIZE(kwnames))) + +/* Disable unused label warnings. They are handy for debugging, even + if computed gotos aren't used. */ + +/* TBD - what about other compilers? */ +#if defined(__GNUC__) +# pragma GCC diagnostic push +# pragma GCC diagnostic ignored "-Wunused-label" +#elif defined(_MSC_VER) /* MS_WINDOWS */ +# pragma warning(push) +# pragma warning(disable:4102) +#endif - TARGET(CALL_FUNCTION_EX) { - PREDICTED(CALL_FUNCTION_EX); - PyObject *func, *callargs, *kwargs = NULL, *result; - if (oparg & 0x01) { - kwargs = POP(); - // DICT_MERGE is called before this opcode if there are kwargs. - // It converts all dict subtypes in kwargs into regular dicts. 
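/* Illustrative sketch only, not lines from this patch: how the CALL stack
 * convention encoded by is_method() and KWNAMES_LEN() is combined in the
 * handlers removed above, assuming the 3.12-era layout in which a plain call
 * leaves NULL in the extra bottom slot and a bound-method call leaves the
 * unbound function there:
 *
 *     // stack before CALL, with oparg explicit arguments:
 *     //   [NULL,   callable, arg0 .. argN-1]    plain call
 *     //   [method, self,     arg0 .. argN-1]    bound-method call
 *
 *     int is_meth = is_method(stack_pointer, oparg);     // extra slot non-NULL?
 *     int total_args = oparg + is_meth;                  // self becomes arg 0
 *     int positional_args = total_args - KWNAMES_LEN();  // kwnames covers the tail
 *
 * kwnames here is the local borrowed reference set by KW_NAMES and reset by
 * the CALL instructions, which this patch uses in place of call_shape.kwnames.
 */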
- assert(PyDict_CheckExact(kwargs)); - } - callargs = POP(); - func = TOP(); - if (!PyTuple_CheckExact(callargs)) { - if (check_args_iterable(tstate, func, callargs) < 0) { - Py_DECREF(callargs); - goto error; - } - Py_SETREF(callargs, PySequence_Tuple(callargs)); - if (callargs == NULL) { - goto error; - } - } - assert(PyTuple_CheckExact(callargs)); +PyObject* _Py_HOT_FUNCTION +_PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int throwflag) +{ + _Py_EnsureTstateNotNULL(tstate); + CALL_STAT_INC(pyeval_calls); - result = do_call_core(tstate, func, callargs, kwargs, cframe.use_tracing); - Py_DECREF(func); - Py_DECREF(callargs); - Py_XDECREF(kwargs); - - STACK_SHRINK(1); - assert(TOP() == NULL); - SET_TOP(result); - if (result == NULL) { - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); - } +#if USE_COMPUTED_GOTOS +/* Import the static jump table */ +#include "opcode_targets.h" +#endif - TARGET(MAKE_FUNCTION) { - PyObject *codeobj = POP(); - PyFunctionObject *func = (PyFunctionObject *) - PyFunction_New(codeobj, GLOBALS()); +#ifdef Py_STATS + int lastopcode = 0; +#endif + // opcode is an 8-bit value to improve the code generated by MSVC + // for the big switch below (in combination with the EXTRA_CASES macro). + uint8_t opcode; /* Current opcode */ + int oparg; /* Current opcode argument, if any */ + int oparg1; + int oparg2; + int oparg3; + _Py_atomic_int * const eval_breaker = &tstate->interp->ceval.eval_breaker; +#ifdef LLTRACE + int lltrace = 0; +#endif - Py_DECREF(codeobj); - if (func == NULL) { - goto error; - } + _PyCFrame cframe; + _PyInterpreterFrame entry_frame; + PyObject *kwnames = NULL; // Borrowed reference. Reset by CALL instructions. - if (oparg & 0x08) { - assert(PyTuple_CheckExact(TOP())); - func->func_closure = POP(); - } - if (oparg & 0x04) { - assert(PyTuple_CheckExact(TOP())); - func->func_annotations = POP(); - } - if (oparg & 0x02) { - assert(PyDict_CheckExact(TOP())); - func->func_kwdefaults = POP(); - } - if (oparg & 0x01) { - assert(PyTuple_CheckExact(TOP())); - func->func_defaults = POP(); - } + /* WARNING: Because the _PyCFrame lives on the C stack, + * but can be accessed from a heap allocated object (tstate) + * strict stack discipline must be maintained. + */ + _PyCFrame *prev_cframe = tstate->cframe; + cframe.use_tracing = prev_cframe->use_tracing; + cframe.previous = prev_cframe; + tstate->cframe = &cframe; - PUSH((PyObject *)func); - DISPATCH(); - } + assert(tstate->interp->interpreter_trampoline != NULL); +#ifdef Py_DEBUG + /* Set these to invalid but identifiable values for debugging. 
*/ + entry_frame.f_funcobj = (PyObject*)0xaaa0; + entry_frame.f_locals = (PyObject*)0xaaa1; + entry_frame.frame_obj = (PyFrameObject*)0xaaa2; + entry_frame.f_globals = (PyObject*)0xaaa3; + entry_frame.f_builtins = (PyObject*)0xaaa4; +#endif + entry_frame.f_code = tstate->interp->interpreter_trampoline; + entry_frame.prev_instr = + _PyCode_CODE(tstate->interp->interpreter_trampoline); + entry_frame.stacktop = 0; + entry_frame.owner = FRAME_OWNED_BY_CSTACK; + entry_frame.yield_offset = 0; + /* Push frame */ + entry_frame.previous = prev_cframe->current_frame; + frame->previous = &entry_frame; + cframe.current_frame = frame; - TARGET(RETURN_GENERATOR) { - assert(PyFunction_Check(frame->f_funcobj)); - PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj; - PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func); - if (gen == NULL) { - goto error; - } - assert(EMPTY()); - _PyFrame_SetStackPointer(frame, stack_pointer); - _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe; - _PyFrame_Copy(frame, gen_frame); - assert(frame->frame_obj == NULL); - gen->gi_frame_state = FRAME_CREATED; - gen_frame->owner = FRAME_OWNED_BY_GENERATOR; - _Py_LeaveRecursiveCallPy(tstate); - if (!frame->is_entry) { - _PyInterpreterFrame *prev = frame->previous; - _PyThreadState_PopFrame(tstate, frame); - frame = cframe.current_frame = prev; - _PyFrame_StackPush(frame, (PyObject *)gen); - goto resume_frame; - } - _Py_LeaveRecursiveCallTstate(tstate); - /* Make sure that frame is in a valid state */ - frame->stacktop = 0; - frame->f_locals = NULL; - Py_INCREF(frame->f_funcobj); - Py_INCREF(frame->f_code); - /* Restore previous cframe and return. */ - tstate->cframe = cframe.previous; - tstate->cframe->use_tracing = cframe.use_tracing; - assert(tstate->cframe->current_frame == frame->previous); - assert(!_PyErr_Occurred(tstate)); - return (PyObject *)gen; - } + if (_Py_EnterRecursiveCallTstate(tstate, "")) { + tstate->c_recursion_remaining--; + tstate->py_recursion_remaining--; + goto exit_unwind; + } - TARGET(BUILD_SLICE) { - PyObject *start, *stop, *step, *slice; - if (oparg == 3) - step = POP(); - else - step = NULL; - stop = POP(); - start = TOP(); - slice = PySlice_New(start, stop, step); - Py_DECREF(start); - Py_DECREF(stop); - Py_XDECREF(step); - SET_TOP(slice); - if (slice == NULL) - goto error; - DISPATCH(); + /* support for generator.throw() */ + if (throwflag) { + if (_Py_EnterRecursivePy(tstate)) { + goto exit_unwind; } + TRACE_FUNCTION_THROW_ENTRY(); + DTRACE_FUNCTION_ENTRY(); + goto resume_with_error; + } - TARGET(FORMAT_VALUE) { - /* Handles f-string value formatting. */ - PyObject *result; - PyObject *fmt_spec; - PyObject *value; - PyObject *(*conv_fn)(PyObject *); - int which_conversion = oparg & FVC_MASK; - int have_fmt_spec = (oparg & FVS_MASK) == FVS_HAVE_SPEC; - - fmt_spec = have_fmt_spec ? POP() : NULL; - value = POP(); - - /* See if any conversion is specified. */ - switch (which_conversion) { - case FVC_NONE: conv_fn = NULL; break; - case FVC_STR: conv_fn = PyObject_Str; break; - case FVC_REPR: conv_fn = PyObject_Repr; break; - case FVC_ASCII: conv_fn = PyObject_ASCII; break; - default: - _PyErr_Format(tstate, PyExc_SystemError, - "unexpected conversion flag %d", - which_conversion); - goto error; - } + /* Local "register" variables. + * These are cached values from the frame and code object. */ - /* If there's a conversion function, call it and replace - value with that result. Otherwise, just use value, - without conversion. 
*/ - if (conv_fn != NULL) { - result = conv_fn(value); - Py_DECREF(value); - if (result == NULL) { - Py_XDECREF(fmt_spec); - goto error; - } - value = result; - } + PyObject *names; + PyObject *consts; + _Py_CODEUNIT *next_instr; + PyObject **stack_pointer; - /* If value is a unicode object, and there's no fmt_spec, - then we know the result of format(value) is value - itself. In that case, skip calling format(). I plan to - move this optimization in to PyObject_Format() - itself. */ - if (PyUnicode_CheckExact(value) && fmt_spec == NULL) { - /* Do nothing, just transfer ownership to result. */ - result = value; - } else { - /* Actually call format(). */ - result = PyObject_Format(value, fmt_spec); - Py_DECREF(value); - Py_XDECREF(fmt_spec); - if (result == NULL) { - goto error; - } - } +/* Sets the above local variables from the frame */ +#define SET_LOCALS_FROM_FRAME() \ + { \ + PyCodeObject *co = frame->f_code; \ + names = co->co_names; \ + consts = co->co_consts; \ + } \ + assert(_PyInterpreterFrame_LASTI(frame) >= -1); \ + /* Jump back to the last instruction executed... */ \ + if (VERBOSE) { \ + fprintf(stderr, "Jump back to the last instruction executed\n"); \ + fprintf(stderr, "_PyInterpreterFrame_LASTI(frame) = %d\n filename = ", _PyInterpreterFrame_LASTI(frame)); \ + if(!_PyErr_Occurred(tstate)) PyObject_Print(frame->f_code->co_filename, stderr, 0); \ + fprintf(stderr, "\n name = "); \ + if(!_PyErr_Occurred(tstate)) PyObject_Print(frame->f_code->co_name, stderr, 0); \ + fprintf(stderr, "\n"); \ + } \ + next_instr = frame->prev_instr + (_PyInterpreterFrame_LASTI(frame) == -1 ? 1 : OPSIZE); /* TODO: init frame to -OPSIZE? */ \ + stack_pointer = _PyFrame_GetStackPointer(frame); \ + /* Set stackdepth to -1. \ + Update when returning or calling trace function. \ + Having stackdepth <= 0 ensures that invalid \ + values are not visible to the cycle GC. \ + We choose -1 rather than 0 to assist debugging. 
\ + */ \ + frame->stacktop = -1; - PUSH(result); - DISPATCH(); - } - TARGET(COPY) { - assert(oparg != 0); - PyObject *peek = PEEK(oparg); - Py_INCREF(peek); - PUSH(peek); - DISPATCH(); - } +start_frame: + if (_Py_EnterRecursivePy(tstate)) { + goto exit_unwind; + } - TARGET(BINARY_OP) { - PREDICTED(BINARY_OP); - PyObject *rhs = POP(); - PyObject *lhs = TOP(); - assert(0 <= oparg); - assert((unsigned)oparg < Py_ARRAY_LENGTH(binary_ops)); - assert(binary_ops[oparg]); - PyObject *res = binary_ops[oparg](lhs, rhs); - Py_DECREF(lhs); - Py_DECREF(rhs); - SET_TOP(res); - if (res == NULL) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP); - DISPATCH(); - } +resume_frame: + SET_LOCALS_FROM_FRAME(); - TARGET(BINARY_OP_ADAPTIVE) { - assert(cframe.use_tracing == 0); - _PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache)) { - PyObject *lhs = SECOND(); - PyObject *rhs = TOP(); - next_instr--; - _Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, &GETLOCAL(0)); - DISPATCH_SAME_OPARG(); - } - else { - STAT_INC(BINARY_OP, deferred); - DECREMENT_ADAPTIVE_COUNTER(cache); - GO_TO_INSTRUCTION(BINARY_OP); +#ifdef LLTRACE + { + if (frame != &entry_frame) { + int r = PyDict_Contains(GLOBALS(), &_Py_ID(__lltrace__)); + if (r < 0) { + goto exit_unwind; } + lltrace = r; } - - TARGET(SWAP) { - assert(oparg != 0); - PyObject *top = TOP(); - SET_TOP(PEEK(oparg)); - PEEK(oparg) = top; - DISPATCH(); + if (lltrace) { + lltrace_resume_frame(frame); } + } +#endif - TARGET(EXTENDED_ARG) { - assert(oparg); - oparg <<= 8; - oparg |= _Py_OPARG(*next_instr); - // We might be tracing. To avoid breaking tracing guarantees in - // quickened instructions, always deoptimize the next opcode: - opcode = _PyOpcode_Deopt[_Py_OPCODE(*next_instr)]; - PRE_DISPATCH_GOTO(); - // CPython hasn't traced the following instruction historically - // (DO_TRACING would clobber our extended oparg anyways), so just - // skip our usual cframe.use_tracing check before dispatch. Also, - // make sure the next instruction isn't a RESUME, since that needs - // to trace properly (and shouldn't have an extended arg anyways): - assert(opcode != RESUME); - DISPATCH_GOTO(); - } +#ifdef Py_DEBUG + /* _PyEval_EvalFrameDefault() must not be called with an exception set, + because it can clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!_PyErr_Occurred(tstate)); +#endif - TARGET(EXTENDED_ARG_QUICK) { - assert(cframe.use_tracing == 0); - assert(oparg); - int oldoparg = oparg; - NEXTOPARG(); - oparg |= oldoparg << 8; - DISPATCH_GOTO(); - } + DISPATCH(); - TARGET(CACHE) { - Py_UNREACHABLE(); - } +handle_eval_breaker: + + /* Do periodic things, like check for signals and async I/0. + * We need to do reasonably frequently, but not too frequently. + * All loops should include a check of the eval breaker. + * We also check on return from any builtin function. 
+ */ + if (_Py_HandlePending(tstate) != 0) { + goto error; + } + DISPATCH(); + + { + /* Start instructions */ +#if !USE_COMPUTED_GOTOS + dispatch_opcode: + switch (opcode) +#endif + { + +#include "generated_cases.c.h" #if USE_COMPUTED_GOTOS TARGET_DO_TRACING: @@ -5032,7 +1251,8 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int if (INSTR_OFFSET() >= frame->f_code->_co_firsttraceable) { int instr_prev = _PyInterpreterFrame_LASTI(frame); frame->prev_instr = next_instr; - TRACING_NEXTOPARG(); + NEXTOPARG(); + // No _PyOpcode_Deopt here, since RESUME has no optimized forms: if (opcode == RESUME) { if (oparg < 2) { CHECK_EVAL_BREAKER(); @@ -5070,7 +1290,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int // next_instr wasn't incremented at the start of this // instruction. Increment it before handling the error, // so that it looks the same as a "normal" instruction: - next_instr++; + next_instr += OPSIZE; goto error; } // Reload next_instr. Don't increment it, though, since @@ -5079,8 +1299,29 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int } } } - TRACING_NEXTOPARG(); + NEXTOPARG(); PRE_DISPATCH_GOTO(); + // No _PyOpcode_Deopt here, since EXTENDED_ARG has no optimized forms: + while (opcode == EXTENDED_ARG) { + // CPython hasn't ever traced the instruction after an EXTENDED_ARG. + // Inline the EXTENDED_ARG here, so we can avoid branching there: + INSTRUCTION_START(EXTENDED_ARG); + opcode = _Py_OPCODE(*next_instr); + oparg = oparg << 8 | _Py_OPARG(*next_instr); + // Make sure the next instruction isn't a RESUME, since that needs + // to trace properly (and shouldn't have an EXTENDED_ARG, anyways): + assert(opcode != RESUME); + PRE_DISPATCH_GOTO(); + } + opcode = _PyOpcode_Deopt[opcode]; + if (_PyOpcode_Caches[opcode]) { + uint16_t *counter = &next_instr[OPSIZE].cache; + // The instruction is going to decrement the counter, so we need to + // increment it here to make sure it doesn't try to specialize: + if (!ADAPTIVE_COUNTER_IS_MAX(*counter)) { + INCREMENT_ADAPTIVE_COUNTER(*counter); + } + } DISPATCH_GOTO(); } @@ -5105,27 +1346,6 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int or goto error. */ Py_UNREACHABLE(); -/* Specialization misses */ - -miss: - { - STAT_INC(opcode, miss); - opcode = _PyOpcode_Deopt[opcode]; - STAT_INC(opcode, miss); - /* The counter is always the first cache entry: */ - _Py_CODEUNIT *counter = (_Py_CODEUNIT *)next_instr; - *counter -= 1; - if (*counter == 0) { - int adaptive_opcode = _PyOpcode_Adaptive[opcode]; - assert(adaptive_opcode); - _Py_SET_OPCODE(next_instr[-1], adaptive_opcode); - STAT_INC(opcode, deopt); - *counter = adaptive_counter_start(); - } - next_instr--; - DISPATCH_GOTO(); - } - unbound_local_error: { format_exc_check_arg(tstate, PyExc_UnboundLocalError, @@ -5135,8 +1355,16 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int goto error; } +pop_4_error: + STACK_SHRINK(1); +pop_3_error: + STACK_SHRINK(1); +pop_2_error: + STACK_SHRINK(1); +pop_1_error: + STACK_SHRINK(1); error: - call_shape.kwnames = NULL; + kwnames = NULL; /* Double-check exception status. */ #ifdef NDEBUG if (!_PyErr_Occurred(tstate)) { @@ -5148,6 +1376,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int #endif /* Log traceback info. 
*/ + assert(frame != &entry_frame); if (!_PyFrame_IsIncomplete(frame)) { PyFrameObject *f = _PyFrame_GetFrameObject(frame); if (f != NULL) { @@ -5220,7 +1449,12 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int exit_unwind: assert(_PyErr_Occurred(tstate)); _Py_LeaveRecursiveCallPy(tstate); - if (frame->is_entry) { + assert(frame != &entry_frame); + // GH-99729: We need to unlink the frame *before* clearing it: + _PyInterpreterFrame *dying = frame; + frame = cframe.current_frame = dying->previous; + _PyEvalFrameClearAndPop(tstate, dying); + if (frame == &entry_frame) { /* Restore previous cframe and exit */ tstate->cframe = cframe.previous; tstate->cframe->use_tracing = cframe.use_tracing; @@ -5228,13 +1462,17 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int _Py_LeaveRecursiveCallTstate(tstate); return NULL; } - frame = cframe.current_frame = pop_frame(tstate, frame); resume_with_error: SET_LOCALS_FROM_FRAME(); goto error; } +#if defined(__GNUC__) +# pragma GCC diagnostic pop +#elif defined(_MSC_VER) /* MS_WINDOWS */ +# pragma warning(pop) +#endif static void format_missing(PyThreadState *tstate, const char *kind, @@ -5700,8 +1938,7 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func, for (; i < defcount; i++) { if (localsplus[m+i] == NULL) { PyObject *def = defs[i]; - Py_INCREF(def); - localsplus[m+i] = def; + localsplus[m+i] = Py_NewRef(def); } } } @@ -5717,8 +1954,7 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func, if (func->func_kwdefaults != NULL) { PyObject *def = PyDict_GetItemWithError(func->func_kwdefaults, varname); if (def) { - Py_INCREF(def); - localsplus[i] = def; + localsplus[i] = Py_NewRef(def); continue; } else if (_PyErr_Occurred(tstate)) { @@ -5760,6 +1996,7 @@ _PyEvalFramePushAndInit(PyThreadState *tstate, PyFunctionObject *func, { PyCodeObject * code = (PyCodeObject *)func->func_code; CALL_STAT_INC(frames_pushed); + int nconsts = (int)PyTuple_Size(code->co_consts); _PyInterpreterFrame *frame = _PyThreadState_PushFrame(tstate, code->co_framesize); if (frame == NULL) { goto fail; @@ -5774,6 +2011,11 @@ _PyEvalFramePushAndInit(PyThreadState *tstate, PyFunctionObject *func, _PyEvalFrameClearAndPop(tstate, frame); return NULL; } + if (nconsts > 0) { + PyObject **const_regs = localsarray + (code->co_nlocalsplus + code->co_stacksize); + PyObject **consts = &PyTuple_GET_ITEM(code->co_consts, 0); + Py_MEMCPY(const_regs, consts, sizeof(PyObject*) * nconsts); + } return frame; fail: /* Consume the references */ @@ -5791,20 +2033,48 @@ _PyEvalFramePushAndInit(PyThreadState *tstate, PyFunctionObject *func, } static void -_PyEvalFrameClearAndPop(PyThreadState *tstate, _PyInterpreterFrame * frame) +clear_thread_frame(PyThreadState *tstate, _PyInterpreterFrame * frame) { + assert(frame->owner == FRAME_OWNED_BY_THREAD); // Make sure that this is, indeed, the top frame. 
We can't check this in // _PyThreadState_PopFrame, since f_code is already cleared at that point: assert((PyObject **)frame + frame->f_code->co_framesize == - tstate->datastack_top); + tstate->datastack_top); tstate->c_recursion_remaining--; assert(frame->frame_obj == NULL || frame->frame_obj->f_frame == frame); - assert(frame->owner == FRAME_OWNED_BY_THREAD); _PyFrame_Clear(frame); tstate->c_recursion_remaining++; _PyThreadState_PopFrame(tstate, frame); } +static void +clear_gen_frame(PyThreadState *tstate, _PyInterpreterFrame * frame) +{ + assert(frame->owner == FRAME_OWNED_BY_GENERATOR); + PyGenObject *gen = _PyFrame_GetGenerator(frame); + gen->gi_frame_state = FRAME_CLEARED; + assert(tstate->exc_info == &gen->gi_exc_state); + tstate->exc_info = gen->gi_exc_state.previous_item; + gen->gi_exc_state.previous_item = NULL; + tstate->c_recursion_remaining--; + assert(frame->frame_obj == NULL || frame->frame_obj->f_frame == frame); + _PyFrame_Clear(frame); + tstate->c_recursion_remaining++; + frame->previous = NULL; +} + +static void +_PyEvalFrameClearAndPop(PyThreadState *tstate, _PyInterpreterFrame * frame) +{ + if (frame->owner == FRAME_OWNED_BY_THREAD) { + clear_thread_frame(tstate, frame); + } + else { + clear_gen_frame(tstate, frame); + } +} + + PyObject * _PyEval_Vector(PyThreadState *tstate, PyFunctionObject *func, PyObject *locals, @@ -5830,13 +2100,7 @@ _PyEval_Vector(PyThreadState *tstate, PyFunctionObject *func, return NULL; } EVAL_CALL_STAT_INC(EVAL_CALL_VECTOR); - PyObject *retval = _PyEval_EvalFrame(tstate, frame, 0); - assert( - _PyFrame_GetStackPointer(frame) == _PyFrame_Stackbase(frame) || - _PyFrame_GetStackPointer(frame) == frame->localsplus - ); - _PyEvalFrameClearAndPop(tstate, frame); - return retval; + return _PyEval_EvalFrame(tstate, frame, 0); } /* Legacy API */ @@ -5881,15 +2145,13 @@ PyEval_EvalCodeEx(PyObject *_co, PyObject *globals, PyObject *locals, newargs[i] = args[i]; } for (int i = 0; i < kwcount; i++) { - Py_INCREF(kws[2*i]); - PyTuple_SET_ITEM(kwnames, i, kws[2*i]); + PyTuple_SET_ITEM(kwnames, i, Py_NewRef(kws[2*i])); newargs[argcount+i] = kws[2*i+1]; } allargs = newargs; } for (int i = 0; i < kwcount; i++) { - Py_INCREF(kws[2*i]); - PyTuple_SET_ITEM(kwnames, i, kws[2*i]); + PyTuple_SET_ITEM(kwnames, i, Py_NewRef(kws[2*i])); } PyFrameConstructor constr = { .fc_globals = globals, @@ -6184,8 +2446,7 @@ call_exc_trace(Py_tracefunc func, PyObject *self, int err; _PyErr_Fetch(tstate, &type, &value, &orig_traceback); if (value == NULL) { - value = Py_None; - Py_INCREF(value); + value = Py_NewRef(Py_None); } _PyErr_NormalizeException(tstate, &type, &value, &orig_traceback); traceback = (orig_traceback != NULL) ? 
orig_traceback : Py_None; @@ -6488,8 +2749,7 @@ _PyEval_SetAsyncGenFirstiter(PyObject *firstiter) return -1; } - Py_XINCREF(firstiter); - Py_XSETREF(tstate->async_gen_firstiter, firstiter); + Py_XSETREF(tstate->async_gen_firstiter, Py_XNewRef(firstiter)); return 0; } @@ -6509,8 +2769,7 @@ _PyEval_SetAsyncGenFinalizer(PyObject *finalizer) return -1; } - Py_XINCREF(finalizer); - Py_XSETREF(tstate->async_gen_finalizer, finalizer); + Py_XSETREF(tstate->async_gen_finalizer, Py_XNewRef(finalizer)); return 0; } diff --git a/Python/ceval_gil.c b/Python/ceval_gil.c index 9b9d7dc1d1af1e..83f4e91e54573a 100644 --- a/Python/ceval_gil.c +++ b/Python/ceval_gil.c @@ -819,11 +819,10 @@ make_pending_calls(PyInterpreterState *interp) } /* don't perform recursive pending calls */ - static int busy = 0; - if (busy) { + if (interp->ceval.pending.busy) { return 0; } - busy = 1; + interp->ceval.pending.busy = 1; /* unsignal before starting to call callbacks, so that any callback added in-between re-signals */ @@ -851,11 +850,11 @@ make_pending_calls(PyInterpreterState *interp) } } - busy = 0; + interp->ceval.pending.busy = 0; return res; error: - busy = 0; + interp->ceval.pending.busy = 0; SIGNAL_PENDING_CALLS(interp); return res; } diff --git a/Python/clinic/bltinmodule.c.h b/Python/clinic/bltinmodule.c.h index 19930a519be089..89f069dd97f6ea 100644 --- a/Python/clinic/bltinmodule.c.h +++ b/Python/clinic/bltinmodule.c.h @@ -354,8 +354,8 @@ builtin_compile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj } } if (args[4]) { - dont_inherit = _PyLong_AsInt(args[4]); - if (dont_inherit == -1 && PyErr_Occurred()) { + dont_inherit = PyObject_IsTrue(args[4]); + if (dont_inherit < 0) { goto exit; } if (!--noptargs) { @@ -1215,4 +1215,4 @@ builtin_issubclass(PyObject *module, PyObject *const *args, Py_ssize_t nargs) exit: return return_value; } -/*[clinic end generated code: output=3c9497e0ffeb8a30 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=973da43fa65aa727 input=a9049054013a1b77]*/ diff --git a/Python/clinic/sysmodule.c.h b/Python/clinic/sysmodule.c.h index 3dc7aa8118a5d7..5678d0ac2a608b 100644 --- a/Python/clinic/sysmodule.c.h +++ b/Python/clinic/sysmodule.c.h @@ -884,33 +884,6 @@ sys_gettotalrefcount(PyObject *module, PyObject *Py_UNUSED(ignored)) #endif /* defined(Py_REF_DEBUG) */ -PyDoc_STRVAR(sys__getquickenedcount__doc__, -"_getquickenedcount($module, /)\n" -"--\n" -"\n"); - -#define SYS__GETQUICKENEDCOUNT_METHODDEF \ - {"_getquickenedcount", (PyCFunction)sys__getquickenedcount, METH_NOARGS, sys__getquickenedcount__doc__}, - -static Py_ssize_t -sys__getquickenedcount_impl(PyObject *module); - -static PyObject * -sys__getquickenedcount(PyObject *module, PyObject *Py_UNUSED(ignored)) -{ - PyObject *return_value = NULL; - Py_ssize_t _return_value; - - _return_value = sys__getquickenedcount_impl(module); - if ((_return_value == -1) && PyErr_Occurred()) { - goto exit; - } - return_value = PyLong_FromSsize_t(_return_value); - -exit: - return return_value; -} - PyDoc_STRVAR(sys_getallocatedblocks__doc__, "getallocatedblocks($module, /)\n" "--\n" @@ -1345,4 +1318,4 @@ sys_is_stack_trampoline_active(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef SYS_GETANDROIDAPILEVEL_METHODDEF #define SYS_GETANDROIDAPILEVEL_METHODDEF #endif /* !defined(SYS_GETANDROIDAPILEVEL_METHODDEF) */ -/*[clinic end generated code: output=2b5e1bc24a3348bd input=a9049054013a1b77]*/ +/*[clinic end generated code: output=79228e569529129c input=a9049054013a1b77]*/ diff --git a/Python/codecs.c b/Python/codecs.c 
index 33965f885f7064..b2087b499dfdba 100644 --- a/Python/codecs.c +++ b/Python/codecs.c @@ -235,8 +235,7 @@ PyObject *args_tuple(PyObject *object, args = PyTuple_New(1 + (errors != NULL)); if (args == NULL) return NULL; - Py_INCREF(object); - PyTuple_SET_ITEM(args,0,object); + PyTuple_SET_ITEM(args, 0, Py_NewRef(object)); if (errors) { PyObject *v; @@ -263,8 +262,7 @@ PyObject *codec_getitem(const char *encoding, int index) return NULL; v = PyTuple_GET_ITEM(codecs, index); Py_DECREF(codecs); - Py_INCREF(v); - return v; + return Py_NewRef(v); } /* Helper functions to create an incremental codec. */ @@ -430,8 +428,7 @@ _PyCodec_EncodeInternal(PyObject *object, "encoder must return a tuple (object, integer)"); goto onError; } - v = PyTuple_GET_ITEM(result,0); - Py_INCREF(v); + v = Py_NewRef(PyTuple_GET_ITEM(result,0)); /* We don't check or use the second (integer) entry. */ Py_DECREF(args); @@ -475,8 +472,7 @@ _PyCodec_DecodeInternal(PyObject *object, "decoder must return a tuple (object,integer)"); goto onError; } - v = PyTuple_GET_ITEM(result,0); - Py_INCREF(v); + v = Py_NewRef(PyTuple_GET_ITEM(result,0)); /* We don't check or use the second (integer) entry. */ Py_DECREF(args); @@ -571,8 +567,7 @@ PyObject *codec_getitem_checked(const char *encoding, if (codec == NULL) return NULL; - v = PyTuple_GET_ITEM(codec, index); - Py_INCREF(v); + v = Py_NewRef(PyTuple_GET_ITEM(codec, index)); Py_DECREF(codec); return v; } diff --git a/Python/compile.c b/Python/compile.c index f8924789f4e954..acc0738b35cdbc 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -55,6 +55,16 @@ */ #define STACK_USE_GUIDELINE 30 +#undef SUCCESS +#undef ERROR +#define SUCCESS 0 +#define ERROR -1 + +#define RETURN_IF_ERROR(X) \ + if ((X) == -1) { \ + return ERROR; \ + } + /* If we exceed this limit, it should * be considered a compiler bug. 
* Currently it should be impossible @@ -122,7 +132,7 @@ (opcode) == STORE_FAST__STORE_FAST) #define IS_TOP_LEVEL_AWAIT(c) ( \ - (c->c_flags->cf_flags & PyCF_ALLOW_TOP_LEVEL_AWAIT) \ + (c->c_flags.cf_flags & PyCF_ALLOW_TOP_LEVEL_AWAIT) \ && (c->u->u_ste->ste_type == ModuleBlock)) typedef _PyCompilerSrcLocation location; @@ -162,15 +172,58 @@ static struct jump_target_label_ NO_LABEL = {-1}; return 0; \ } +enum oparg_type { + UNUSED_ARG, + EXPLICIT_ARG, /* int value */ + CONST_REG, /* index of const */ + NAME_REG, /* index of name */ + TMP_REG, /* index of tmp */ +}; + +typedef struct oparg_ { + enum oparg_type type; + int value; /* logical value set by codegen */ + int final; /* actual reg value, resolved in assembly */ +} oparg_t; + +#define UNUSED_OPARG ((const oparg_t){.value=(0), .type=UNUSED_ARG}) +#define EXPLICIT_OPARG(V) ((const oparg_t){.value=(V), .type=EXPLICIT_ARG}) +#define CONST_OPARG(V) ((const oparg_t){.value=(V), .type=CONST_REG}) +#define NAME_OPARG(V) ((const oparg_t){.value=(V), .type=NAME_REG}) +#define TMP_OPARG(V) ((const oparg_t){.value=(V), .type=TMP_REG}) + +#define IS_UNUSED(OPARG) ((OPARG).type == UNUSED_OPARG) + struct instr { int i_opcode; int i_oparg; + oparg_t i_oparg1; + oparg_t i_oparg2; + oparg_t i_oparg3; location i_loc; /* The following fields should not be set by the front-end: */ struct basicblock_ *i_target; /* target block (if jump instruction) */ struct basicblock_ *i_except; /* target block when exception is raised */ }; +/* One arg*/ +#define INSTR_SET_OP1(I, OP, ARG) \ + do { \ + assert(HAS_ARG(OP)); \ + struct instr *_instr__ptr_ = (I); \ + _instr__ptr_->i_opcode = (OP); \ + _instr__ptr_->i_oparg = (ARG); \ + } while (0); + +/* No args*/ +#define INSTR_SET_OP0(I, OP) \ + do { \ + assert(!HAS_ARG(OP)); \ + struct instr *_instr__ptr_ = (I); \ + _instr__ptr_->i_opcode = (OP); \ + _instr__ptr_->i_oparg = 0; \ + } while (0); + typedef struct exceptstack { struct basicblock_ *handlers[CO_MAXBLOCKS+1]; int depth; @@ -213,15 +266,28 @@ is_jump(struct instr *i) return IS_JUMP_OPCODE(i->i_opcode); } +static int extended_args(int oparg) +{ + return (0xFFFFFF < oparg) + (0xFFFF < oparg) + (0xFF < oparg); +} + static int -instr_size(struct instr *instruction) +instr_size(struct instr *instr) { - int opcode = instruction->i_opcode; + int opcode = instr->i_opcode; assert(!IS_PSEUDO_OPCODE(opcode)); - int oparg = HAS_ARG(opcode) ? instruction->i_oparg : 0; - int extended_args = (0xFFFFFF < oparg) + (0xFFFF < oparg) + (0xFF < oparg); + int oparg = instr->i_oparg; + assert(HAS_ARG(opcode) || oparg == 0); + int oparg1 = instr->i_oparg1.type != UNUSED_ARG ? instr->i_oparg1.final : oparg; + int oparg2 = instr->i_oparg2.final; + int oparg3 = instr->i_oparg3.final; + int e1 = extended_args(oparg1); + int e2 = extended_args(oparg2); + int e3 = extended_args(oparg3); + int extended_args = e1 > e2 ? e1 : e2; + extended_args = extended_args > e3 ? extended_args : e3; int caches = _PyOpcode_Caches[opcode]; - return extended_args + 1 + caches; + return OPSIZE * (extended_args + 1) + caches; } static void @@ -229,26 +295,63 @@ write_instr(_Py_CODEUNIT *codestr, struct instr *instruction, int ilen) { int opcode = instruction->i_opcode; assert(!IS_PSEUDO_OPCODE(opcode)); - int oparg = HAS_ARG(opcode) ? instruction->i_oparg : 0; + int oparg = instruction->i_oparg; + assert(HAS_ARG(opcode) || oparg == 0); int caches = _PyOpcode_Caches[opcode]; - switch (ilen - caches) { + int oparg1 = instruction->i_oparg1.type != UNUSED_ARG ? 
+ instruction->i_oparg1.final : oparg; + int oparg2 = instruction->i_oparg2.final; + int oparg3 = instruction->i_oparg3.final; + +if (0) { + if (opcode == LOAD_FAST_R || opcode == STORE_FAST_R) + { + fprintf(stderr, + "write_instr [%d]: oparg = %d oparg1 = %d oparg2 = %d oparg3 = %d\n", + opcode, oparg, oparg1, oparg2, oparg3); + } +} + + switch ((ilen - caches)/OPSIZE) { case 4: - *codestr++ = _Py_MAKECODEUNIT(EXTENDED_ARG, (oparg >> 24) & 0xFF); + codestr->opcode = EXTENDED_ARG; + codestr->oparg = (oparg1 >> 24) & 0xFF; + codestr++; + codestr->oparg2 = (oparg2 >> 24) & 0xFF; + codestr->oparg3 = (oparg3 >> 24) & 0xFF; + codestr++; /* fall through */ case 3: - *codestr++ = _Py_MAKECODEUNIT(EXTENDED_ARG, (oparg >> 16) & 0xFF); + codestr->opcode = EXTENDED_ARG; + codestr->oparg = (oparg1 >> 16) & 0xFF; + codestr++; + codestr->oparg2 = (oparg2 >> 16) & 0xFF; + codestr->oparg3 = (oparg3 >> 16) & 0xFF; + codestr++; /* fall through */ case 2: - *codestr++ = _Py_MAKECODEUNIT(EXTENDED_ARG, (oparg >> 8) & 0xFF); + codestr->opcode = EXTENDED_ARG; + codestr->oparg = (oparg1 >> 8) & 0xFF; + codestr++; + codestr->oparg2 = (oparg2 >> 8) & 0xFF; + codestr->oparg3 = (oparg3 >> 8) & 0xFF; + codestr++; /* fall through */ case 1: - *codestr++ = _Py_MAKECODEUNIT(opcode, oparg & 0xFF); + codestr->opcode = opcode; + codestr->oparg = oparg1 & 0xFF; + codestr++; + codestr->oparg2 = oparg2 & 0xFF; + codestr->oparg3 = oparg3 & 0XFF; + codestr++; break; default: Py_UNREACHABLE(); } while (caches--) { - *codestr++ = _Py_MAKECODEUNIT(CACHE, 0); + codestr->opcode = CACHE; + codestr->oparg = 0; + codestr++; } } @@ -400,6 +503,7 @@ struct compiler_unit { struct fblockinfo u_fblock[CO_MAXBLOCKS]; int u_firstlineno; /* the first lineno of the block */ + int u_ntmps; /* number of temporary registers */ }; /* This struct captures the global state of a compilation. @@ -417,8 +521,8 @@ handled by the symbol analysis pass. 
struct compiler { PyObject *c_filename; struct symtable *c_st; - PyFutureFeatures *c_future; /* pointer to module's __future__ */ - PyCompilerFlags *c_flags; + PyFutureFeatures c_future; /* module's __future__ */ + PyCompilerFlags c_flags; int c_optimize; /* optimization level */ int c_interactive; /* true if in interactive mode */ @@ -428,6 +532,7 @@ struct compiler { struct compiler_unit *u; /* compiler state for current block */ PyObject *c_stack; /* Python list holding compiler_unit ptrs */ PyArena *c_arena; /* pointer to memory allocation arena */ + bool c_regcode; /* produce regmachine code for this file */ }; #define CFG_BUILDER(c) (&((c)->u->u_cfg_builder)) @@ -478,7 +583,7 @@ static int compiler_annassign(struct compiler *, stmt_ty); static int compiler_subscript(struct compiler *, expr_ty); static int compiler_slice(struct compiler *, expr_ty); -static int are_all_items_const(asdl_expr_seq *, Py_ssize_t, Py_ssize_t); +static bool are_all_items_const(asdl_expr_seq *, Py_ssize_t, Py_ssize_t); static int compiler_with(struct compiler *, stmt_ty, int); @@ -507,13 +612,12 @@ static int compiler_async_comprehension_generator( int depth, expr_ty elt, expr_ty val, int type); -static int compiler_pattern(struct compiler *, location *, - pattern_ty, pattern_context *); +static int compiler_pattern(struct compiler *, pattern_ty, pattern_context *); static int compiler_match(struct compiler *, stmt_ty); -static int compiler_pattern_subpattern(struct compiler *, location *, +static int compiler_pattern_subpattern(struct compiler *, pattern_ty, pattern_context *); -static void remove_redundant_nops(basicblock *bb); +static int remove_redundant_nops(basicblock *bb); static PyCodeObject *assemble(struct compiler *, int addNone); @@ -530,8 +634,7 @@ _Py_Mangle(PyObject *privateobj, PyObject *ident) if (privateobj == NULL || !PyUnicode_Check(privateobj) || PyUnicode_READ_CHAR(ident, 0) != '_' || PyUnicode_READ_CHAR(ident, 1) != '_') { - Py_INCREF(ident); - return ident; + return Py_NewRef(ident); } nlen = PyUnicode_GET_LENGTH(ident); plen = PyUnicode_GET_LENGTH(privateobj); @@ -547,16 +650,14 @@ _Py_Mangle(PyObject *privateobj, PyObject *ident) if ((PyUnicode_READ_CHAR(ident, nlen-1) == '_' && PyUnicode_READ_CHAR(ident, nlen-2) == '_') || PyUnicode_FindChar(ident, '.', 0, nlen, 1) != -1) { - Py_INCREF(ident); - return ident; /* Don't mangle __whatever__ */ + return Py_NewRef(ident); /* Don't mangle __whatever__ */ } /* Strip leading underscores from class name */ ipriv = 0; while (PyUnicode_READ_CHAR(privateobj, ipriv) == '_') ipriv++; if (ipriv == plen) { - Py_INCREF(ident); - return ident; /* Don't mangle if class is just underscores */ + return Py_NewRef(ident); /* Don't mangle if class is just underscores */ } plen -= ipriv; @@ -587,70 +688,86 @@ _Py_Mangle(PyObject *privateobj, PyObject *ident) return result; } + static int -compiler_init(struct compiler *c) +compiler_setup(struct compiler *c, mod_ty mod, PyObject *filename, + PyCompilerFlags flags, int optimize, PyArena *arena) { - memset(c, 0, sizeof(struct compiler)); - c->c_const_cache = PyDict_New(); if (!c->c_const_cache) { - return 0; + return ERROR; } c->c_stack = PyList_New(0); if (!c->c_stack) { - Py_CLEAR(c->c_const_cache); - return 0; + return ERROR; } - return 1; -} + c->c_filename = Py_NewRef(filename); + const char *f = PyUnicode_AsUTF8(c->c_filename); + if (f == NULL) { + PyErr_Clear(); + c->c_regcode = false; + } + else { + c->c_regcode = strstr(f, "mytest"); + } -PyCodeObject * -_PyAST_Compile(mod_ty mod, PyObject *filename, 
PyCompilerFlags *flags, - int optimize, PyArena *arena) -{ - struct compiler c; - PyCodeObject *co = NULL; - PyCompilerFlags local_flags = _PyCompilerFlags_INIT; - int merged; - if (!compiler_init(&c)) - return NULL; - Py_INCREF(filename); - c.c_filename = filename; - c.c_arena = arena; - c.c_future = _PyFuture_FromAST(mod, filename); - if (c.c_future == NULL) - goto finally; - if (!flags) { - flags = &local_flags; + c->c_arena = arena; + if (!_PyFuture_FromAST(mod, filename, &c->c_future)) { + return ERROR; } - merged = c.c_future->ff_features | flags->cf_flags; - c.c_future->ff_features = merged; - flags->cf_flags = merged; - c.c_flags = flags; - c.c_optimize = (optimize == -1) ? _Py_GetConfig()->optimization_level : optimize; - c.c_nestlevel = 0; + int merged = c->c_future.ff_features | flags.cf_flags; + c->c_future.ff_features = merged; + flags.cf_flags = merged; + c->c_flags = flags; + c->c_optimize = (optimize == -1) ? _Py_GetConfig()->optimization_level : optimize; + c->c_nestlevel = 0; _PyASTOptimizeState state; - state.optimize = c.c_optimize; + state.optimize = c->c_optimize; state.ff_features = merged; if (!_PyAST_Optimize(mod, arena, &state)) { - goto finally; + return ERROR; } - - c.c_st = _PySymtable_Build(mod, filename, c.c_future); - if (c.c_st == NULL) { - if (!PyErr_Occurred()) + c->c_st = _PySymtable_Build(mod, filename, &c->c_future); + if (c->c_st == NULL) { + if (!PyErr_Occurred()) { PyErr_SetString(PyExc_SystemError, "no symtable"); - goto finally; + } + return ERROR; + } + return SUCCESS; +} + +static struct compiler* +new_compiler(mod_ty mod, PyObject *filename, PyCompilerFlags *pflags, + int optimize, PyArena *arena) +{ + PyCompilerFlags flags = pflags ? *pflags : _PyCompilerFlags_INIT; + struct compiler *c = PyMem_Calloc(1, sizeof(struct compiler)); + if (c == NULL) { + return NULL; } + if (compiler_setup(c, mod, filename, flags, optimize, arena) < 0) { + compiler_free(c); + return NULL; + } + return c; +} - co = compiler_mod(&c, mod); +PyCodeObject * +_PyAST_Compile(mod_ty mod, PyObject *filename, PyCompilerFlags *pflags, + int optimize, PyArena *arena) +{ + struct compiler *c = new_compiler(mod, filename, pflags, optimize, arena); + if (c == NULL) { + return NULL; + } - finally: - compiler_free(&c); + PyCodeObject *co = compiler_mod(c, mod); + compiler_free(c); assert(co || PyErr_Occurred()); return co; } @@ -660,11 +777,10 @@ compiler_free(struct compiler *c) { if (c->c_st) _PySymtable_Free(c->c_st); - if (c->c_future) - PyObject_Free(c->c_future); Py_XDECREF(c->c_filename); - Py_DECREF(c->c_const_cache); - Py_DECREF(c->c_stack); + Py_XDECREF(c->c_const_cache); + Py_XDECREF(c->c_stack); + PyMem_Free(c); } static PyObject * @@ -778,11 +894,12 @@ cfg_builder_init(cfg_builder *g) { g->g_block_list = NULL; basicblock *block = cfg_builder_new_block(g); - if (block == NULL) - return 0; + if (block == NULL) { + return ERROR; + } g->g_curblock = g->g_entryblock = block; g->g_current_label = NO_LABEL; - return 1; + return SUCCESS; } static void @@ -840,8 +957,10 @@ compiler_set_qualname(struct compiler *c) || u->u_scope_type == COMPILER_SCOPE_CLASS) { assert(u->u_name); mangled = _Py_Mangle(parent->u_private, u->u_name); - if (!mangled) - return 0; + if (!mangled) { + return ERROR; + } + scope = _PyST_GetScope(parent->u_ste, mangled); Py_DECREF(mangled); assert(scope != GLOBAL_IMPLICIT); @@ -857,12 +976,12 @@ compiler_set_qualname(struct compiler *c) _Py_DECLARE_STR(dot_locals, ".<locals>"); base = PyUnicode_Concat(parent->u_qualname, &_Py_STR(dot_locals)); - if (base 
== NULL) - return 0; + if (base == NULL) { + return ERROR; + } } else { - Py_INCREF(parent->u_qualname); - base = parent->u_qualname; + base = Py_NewRef(parent->u_qualname); } } } @@ -871,19 +990,20 @@ compiler_set_qualname(struct compiler *c) _Py_DECLARE_STR(dot, "."); name = PyUnicode_Concat(base, &_Py_STR(dot)); Py_DECREF(base); - if (name == NULL) - return 0; + if (name == NULL) { + return ERROR; + } PyUnicode_Append(&name, u->u_name); - if (name == NULL) - return 0; + if (name == NULL) { + return ERROR; + } } else { - Py_INCREF(u->u_name); - name = u->u_name; + name = Py_NewRef(u->u_name); } u->u_qualname = name; - return 1; + return SUCCESS; } static jump_target_label @@ -1003,20 +1123,6 @@ basicblock_next_instr(basicblock *b) return b->b_iused++; } -/* Set the line number and column offset for the following instructions. - - The line number is reset in the following cases: - - when entering a new scope - - on each statement - - on each expression and sub-expression - - before the "except" and "finally" clauses -*/ - -#define SET_LOC(c, x) - -// Artificial instructions -#define UNSET_LOC(c) - /* Return the stack effect of opcode with argument oparg. @@ -1046,6 +1152,10 @@ stack_effect(int opcode, int oparg, int jump) return -2; /* Unary operators */ + case UNARY_POSITIVE_R: + case UNARY_NEGATIVE_R: + case UNARY_NOT_R: + case UNARY_INVERT_R: case UNARY_POSITIVE: case UNARY_NEGATIVE: case UNARY_NOT: @@ -1176,6 +1286,9 @@ stack_effect(int opcode, int oparg, int jump) * if an exception be raised. */ return jump ? 1 : 0; + case STOPITERATION_ERROR: + return 0; + case PREP_RERAISE_STAR: return -1; case RERAISE: @@ -1187,9 +1300,11 @@ stack_effect(int opcode, int oparg, int jump) return 1; case LOAD_FAST: + case LOAD_FAST_R: case LOAD_FAST_CHECK: return 1; case STORE_FAST: + case STORE_FAST_R: return -1; case DELETE_FAST: return 0; @@ -1275,6 +1390,8 @@ stack_effect(int opcode, int oparg, int jump) return 1; case BINARY_OP: return -1; + case INTERPRETER_EXIT: + return -1; default: return PY_INVALID_STACK_EFFECT; } @@ -1293,12 +1410,9 @@ PyCompile_OpcodeStackEffect(int opcode, int oparg) return stack_effect(opcode, oparg, -1); } -/* Add an opcode with no argument. - Returns 0 on failure, 1 on success. 
-*/ - static int -basicblock_addop(basicblock *b, int opcode, int oparg, location loc) +basicblock_addop(basicblock *b, int opcode, int oparg, location loc, + oparg_t oparg1, oparg_t oparg2, oparg_t oparg3) { assert(IS_WITHIN_OPCODE_RANGE(opcode)); assert(!IS_ASSEMBLER_OPCODE(opcode)); @@ -1307,15 +1421,18 @@ basicblock_addop(basicblock *b, int opcode, int oparg, location loc) int off = basicblock_next_instr(b); if (off < 0) { - return 0; + return ERROR; } struct instr *i = &b->b_instr[off]; i->i_opcode = opcode; i->i_oparg = oparg; + i->i_oparg1 = oparg1; + i->i_oparg2 = oparg2; + i->i_oparg3 = oparg3; i->i_target = NULL; i->i_loc = loc; - return 1; + return SUCCESS; } static bool @@ -1344,19 +1461,23 @@ cfg_builder_maybe_start_new_block(cfg_builder *g) } static int -cfg_builder_addop(cfg_builder *g, int opcode, int oparg, location loc) +cfg_builder_addop(cfg_builder *g, int opcode, int oparg, location loc, + oparg_t oparg1, oparg_t oparg2, oparg_t oparg3) { if (cfg_builder_maybe_start_new_block(g) != 0) { return -1; } - return basicblock_addop(g->g_curblock, opcode, oparg, loc); + return basicblock_addop(g->g_curblock, opcode, oparg, loc, + oparg1, oparg2, oparg3); } + static int cfg_builder_addop_noarg(cfg_builder *g, int opcode, location loc) { assert(!HAS_ARG(opcode)); - return cfg_builder_addop(g, opcode, 0, loc); + return cfg_builder_addop(g, opcode, 0, loc, + UNUSED_OPARG, UNUSED_OPARG, UNUSED_OPARG); } static Py_ssize_t @@ -1394,8 +1515,7 @@ merge_consts_recursive(PyObject *const_cache, PyObject *o) // None and Ellipsis are singleton, and key is the singleton. // No need to merge object and key. if (o == Py_None || o == Py_Ellipsis) { - Py_INCREF(o); - return o; + return Py_NewRef(o); } PyObject *key = _PyCode_ConstantKey(o); @@ -1434,8 +1554,7 @@ merge_consts_recursive(PyObject *const_cache, PyObject *o) v = u; } if (v != item) { - Py_INCREF(v); - PyTuple_SET_ITEM(o, i, v); + PyTuple_SET_ITEM(o, i, Py_NewRef(v)); Py_DECREF(item); } @@ -1470,8 +1589,7 @@ merge_consts_recursive(PyObject *const_cache, PyObject *o) } PyObject *u; if (PyTuple_CheckExact(k)) { - u = PyTuple_GET_ITEM(k, 1); - Py_INCREF(u); + u = Py_NewRef(PyTuple_GET_ITEM(k, 1)); Py_DECREF(k); } else { @@ -1514,8 +1632,9 @@ static int compiler_addop_load_const(struct compiler *c, location loc, PyObject *o) { Py_ssize_t arg = compiler_add_const(c, o); - if (arg < 0) - return 0; + if (arg < 0) { + return ERROR; + } return cfg_builder_addop_i(CFG_BUILDER(c), LOAD_CONST, arg, loc); } @@ -1524,8 +1643,9 @@ compiler_addop_o(struct compiler *c, location loc, int opcode, PyObject *dict, PyObject *o) { Py_ssize_t arg = dict_add_o(dict, o); - if (arg < 0) - return 0; + if (arg < 0) { + return ERROR; + } return cfg_builder_addop_i(CFG_BUILDER(c), opcode, arg, loc); } @@ -1536,12 +1656,14 @@ compiler_addop_name(struct compiler *c, location loc, Py_ssize_t arg; PyObject *mangled = _Py_Mangle(c->u->u_private, o); - if (!mangled) - return 0; + if (!mangled) { + return ERROR; + } arg = dict_add_o(dict, mangled); Py_DECREF(mangled); - if (arg < 0) - return 0; + if (arg < 0) { + return ERROR; + } if (opcode == LOAD_ATTR) { arg <<= 1; } @@ -1553,9 +1675,7 @@ compiler_addop_name(struct compiler *c, location loc, return cfg_builder_addop_i(CFG_BUILDER(c), opcode, arg, loc); } -/* Add an opcode with an integer argument. - Returns 0 on failure, 1 on success. 
-*/ +/* Add an opcode with an integer argument */ static int cfg_builder_addop_i(cfg_builder *g, int opcode, Py_ssize_t oparg, location loc) { @@ -1568,7 +1688,8 @@ cfg_builder_addop_i(cfg_builder *g, int opcode, Py_ssize_t oparg, location loc) EXTENDED_ARG is used for 16, 24, and 32-bit arguments. */ int oparg_ = Py_SAFE_DOWNCAST(oparg, Py_ssize_t, int); - return cfg_builder_addop(g, opcode, oparg_, loc); + return cfg_builder_addop(g, opcode, oparg_, loc, + UNUSED_OPARG, UNUSED_OPARG, UNUSED_OPARG); } static int @@ -1577,97 +1698,86 @@ cfg_builder_addop_j(cfg_builder *g, location loc, { assert(IS_LABEL(target)); assert(IS_JUMP_OPCODE(opcode) || IS_BLOCK_PUSH_OPCODE(opcode)); - return cfg_builder_addop(g, opcode, target.id, loc); + return cfg_builder_addop(g, opcode, target.id, loc, + UNUSED_OPARG, UNUSED_OPARG, UNUSED_OPARG); } +#define ADDOP(C, LOC, OP) \ + RETURN_IF_ERROR(cfg_builder_addop_noarg(CFG_BUILDER(C), (OP), (LOC))) -#define ADDOP(C, LOC, OP) { \ - if (!cfg_builder_addop_noarg(CFG_BUILDER(C), (OP), (LOC))) \ - return 0; \ -} +#define ADDOP_REGS(C, LOC, OP, R1, R2, R3) \ + RETURN_IF_ERROR(cfg_builder_addop(CFG_BUILDER(C), (OP), 0, (LOC), (R1), (R2), (R3))) #define ADDOP_IN_SCOPE(C, LOC, OP) { \ - if (!cfg_builder_addop_noarg(CFG_BUILDER(C), (OP), (LOC))) { \ + if (cfg_builder_addop_noarg(CFG_BUILDER(C), (OP), (LOC)) < 0) { \ compiler_exit_scope(c); \ - return 0; \ + return -1; \ } \ } -#define ADDOP_LOAD_CONST(C, LOC, O) { \ - if (!compiler_addop_load_const((C), (LOC), (O))) \ - return 0; \ -} +#define ADDOP_LOAD_CONST(C, LOC, O) \ + RETURN_IF_ERROR(compiler_addop_load_const((C), (LOC), (O))) /* Same as ADDOP_LOAD_CONST, but steals a reference. */ #define ADDOP_LOAD_CONST_NEW(C, LOC, O) { \ PyObject *__new_const = (O); \ if (__new_const == NULL) { \ - return 0; \ + return ERROR; \ } \ - if (!compiler_addop_load_const((C), (LOC), __new_const)) { \ + if (compiler_addop_load_const((C), (LOC), __new_const) < 0) { \ Py_DECREF(__new_const); \ - return 0; \ + return ERROR; \ } \ Py_DECREF(__new_const); \ } #define ADDOP_N(C, LOC, OP, O, TYPE) { \ assert(!HAS_CONST(OP)); /* use ADDOP_LOAD_CONST_NEW */ \ - if (!compiler_addop_o((C), (LOC), (OP), (C)->u->u_ ## TYPE, (O))) { \ + if (compiler_addop_o((C), (LOC), (OP), (C)->u->u_ ## TYPE, (O)) < 0) { \ Py_DECREF((O)); \ - return 0; \ + return ERROR; \ } \ Py_DECREF((O)); \ } -#define ADDOP_NAME(C, LOC, OP, O, TYPE) { \ - if (!compiler_addop_name((C), (LOC), (OP), (C)->u->u_ ## TYPE, (O))) \ - return 0; \ -} +#define ADDOP_NAME(C, LOC, OP, O, TYPE) \ + RETURN_IF_ERROR(compiler_addop_name((C), (LOC), (OP), (C)->u->u_ ## TYPE, (O))) -#define ADDOP_I(C, LOC, OP, O) { \ - if (!cfg_builder_addop_i(CFG_BUILDER(C), (OP), (O), (LOC))) \ - return 0; \ -} +#define ADDOP_I(C, LOC, OP, O) \ + RETURN_IF_ERROR(cfg_builder_addop_i(CFG_BUILDER(C), (OP), (O), (LOC))) -#define ADDOP_JUMP(C, LOC, OP, O) { \ - if (!cfg_builder_addop_j(CFG_BUILDER(C), (LOC), (OP), (O))) \ - return 0; \ -} +#define ADDOP_JUMP(C, LOC, OP, O) \ + RETURN_IF_ERROR(cfg_builder_addop_j(CFG_BUILDER(C), (LOC), (OP), (O))) -#define ADDOP_COMPARE(C, LOC, CMP) { \ - if (!compiler_addcompare((C), (LOC), (cmpop_ty)(CMP))) \ - return 0; \ -} +#define ADDOP_COMPARE(C, LOC, CMP) \ + RETURN_IF_ERROR(compiler_addcompare((C), (LOC), (cmpop_ty)(CMP))) #define ADDOP_BINARY(C, LOC, BINOP) \ - RETURN_IF_FALSE(addop_binary((C), (LOC), (BINOP), false)) + RETURN_IF_ERROR(addop_binary((C), (LOC), (BINOP), false)) #define ADDOP_INPLACE(C, LOC, BINOP) \ - RETURN_IF_FALSE(addop_binary((C), (LOC), (BINOP), 
true)) + RETURN_IF_ERROR(addop_binary((C), (LOC), (BINOP), true)) #define ADD_YIELD_FROM(C, LOC, await) \ - RETURN_IF_FALSE(compiler_add_yield_from((C), (LOC), (await))) + RETURN_IF_ERROR(compiler_add_yield_from((C), (LOC), (await))) #define POP_EXCEPT_AND_RERAISE(C, LOC) \ - RETURN_IF_FALSE(compiler_pop_except_and_reraise((C), (LOC))) + RETURN_IF_ERROR(compiler_pop_except_and_reraise((C), (LOC))) #define ADDOP_YIELD(C, LOC) \ - RETURN_IF_FALSE(addop_yield((C), (LOC))) + RETURN_IF_ERROR(addop_yield((C), (LOC))) /* VISIT and VISIT_SEQ takes an ASDL type as their second argument. They use the ASDL name to synthesize the name of the C type and the visit function. */ -#define VISIT(C, TYPE, V) {\ - if (!compiler_visit_ ## TYPE((C), (V))) \ - return 0; \ -} +#define VISIT(C, TYPE, V) \ + RETURN_IF_ERROR(compiler_visit_ ## TYPE((C), (V))); #define VISIT_IN_SCOPE(C, TYPE, V) {\ - if (!compiler_visit_ ## TYPE((C), (V))) { \ + if (compiler_visit_ ## TYPE((C), (V)) < 0) { \ compiler_exit_scope(c); \ - return 0; \ + return ERROR; \ } \ } @@ -1676,8 +1786,8 @@ cfg_builder_addop_j(cfg_builder *g, location loc, asdl_ ## TYPE ## _seq *seq = (SEQ); /* avoid variable capture */ \ for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \ TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \ - if (!compiler_visit_ ## TYPE((C), elt)) \ - return 0; \ + if (compiler_visit_ ## TYPE((C), elt) < 0) \ + return ERROR; \ } \ } @@ -1686,17 +1796,13 @@ cfg_builder_addop_j(cfg_builder *g, location loc, asdl_ ## TYPE ## _seq *seq = (SEQ); /* avoid variable capture */ \ for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \ TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \ - if (!compiler_visit_ ## TYPE((C), elt)) { \ + if (compiler_visit_ ## TYPE((C), elt) < 0) { \ compiler_exit_scope(c); \ - return 0; \ + return ERROR; \ } \ } \ } -#define RETURN_IF_FALSE(X) \ - if (!(X)) { \ - return 0; \ - } static int compiler_enter_scope(struct compiler *c, identifier name, @@ -1710,7 +1816,7 @@ compiler_enter_scope(struct compiler *c, identifier name, struct compiler_unit)); if (!u) { PyErr_NoMemory(); - return 0; + return ERROR; } u->u_scope_type = scope_type; u->u_argcount = 0; @@ -1719,15 +1825,14 @@ compiler_enter_scope(struct compiler *c, identifier name, u->u_ste = PySymtable_Lookup(c->c_st, key); if (!u->u_ste) { compiler_unit_free(u); - return 0; + return ERROR; } - Py_INCREF(name); - u->u_name = name; + u->u_name = Py_NewRef(name); u->u_varnames = list2dict(u->u_ste->ste_varnames); u->u_cellvars = dictbytype(u->u_ste->ste_symbols, CELL, 0, 0); if (!u->u_varnames || !u->u_cellvars) { compiler_unit_free(u); - return 0; + return ERROR; } if (u->u_ste->ste_needs_class_closure) { /* Cook up an implicit __class__ cell. 
*/ @@ -1738,7 +1843,7 @@ compiler_enter_scope(struct compiler *c, identifier name, _PyLong_GetZero()); if (res < 0) { compiler_unit_free(u); - return 0; + return ERROR; } } @@ -1746,7 +1851,7 @@ compiler_enter_scope(struct compiler *c, identifier name, PyDict_GET_SIZE(u->u_cellvars)); if (!u->u_freevars) { compiler_unit_free(u); - return 0; + return ERROR; } u->u_nfblocks = 0; @@ -1754,12 +1859,12 @@ compiler_enter_scope(struct compiler *c, identifier name, u->u_consts = PyDict_New(); if (!u->u_consts) { compiler_unit_free(u); - return 0; + return ERROR; } u->u_names = PyDict_New(); if (!u->u_names) { compiler_unit_free(u); - return 0; + return ERROR; } u->u_private = NULL; @@ -1770,34 +1875,30 @@ compiler_enter_scope(struct compiler *c, identifier name, if (!capsule || PyList_Append(c->c_stack, capsule) < 0) { Py_XDECREF(capsule); compiler_unit_free(u); - return 0; + return ERROR; } Py_DECREF(capsule); - u->u_private = c->u->u_private; - Py_XINCREF(u->u_private); + u->u_private = Py_XNewRef(c->u->u_private); } c->u = u; c->c_nestlevel++; cfg_builder *g = CFG_BUILDER(c); - if (!cfg_builder_init(g)) { - return 0; - } + RETURN_IF_ERROR(cfg_builder_init(g)); if (u->u_scope_type == COMPILER_SCOPE_MODULE) { loc.lineno = 0; } else { - if (!compiler_set_qualname(c)) - return 0; + RETURN_IF_ERROR(compiler_set_qualname(c)); } ADDOP_I(c, loc, RESUME, 0); if (u->u_scope_type == COMPILER_SCOPE_MODULE) { loc.lineno = -1; } - return 1; + return SUCCESS; } static void @@ -1831,7 +1932,7 @@ compiler_exit_scope(struct compiler *c) /* Search if variable annotations are present statically in a block. */ -static int +static bool find_ann(asdl_stmt_seq *stmts) { int i, j, res = 0; @@ -1841,7 +1942,7 @@ find_ann(asdl_stmt_seq *stmts) st = (stmt_ty)asdl_seq_GET(stmts, i); switch (st->kind) { case AnnAssign_kind: - return 1; + return true; case For_kind: res = find_ann(st->v.For.body) || find_ann(st->v.For.orelse); @@ -1869,7 +1970,7 @@ find_ann(asdl_stmt_seq *stmts) excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET( st->v.Try.handlers, j); if (find_ann(handler->v.ExceptHandler.body)) { - return 1; + return true; } } res = find_ann(st->v.Try.body) || @@ -1881,7 +1982,7 @@ find_ann(asdl_stmt_seq *stmts) excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET( st->v.TryStar.handlers, j); if (find_ann(handler->v.ExceptHandler.body)) { - return 1; + return true; } } res = find_ann(st->v.TryStar.body) || @@ -1889,7 +1990,7 @@ find_ann(asdl_stmt_seq *stmts) find_ann(st->v.TryStar.orelse); break; default: - res = 0; + res = false; } if (res) { break; @@ -1916,7 +2017,7 @@ compiler_push_fblock(struct compiler *c, location loc, f->fb_block = block_label; f->fb_exit = exit; f->fb_datum = datum; - return 1; + return SUCCESS; } static void @@ -1936,7 +2037,7 @@ compiler_call_exit_with_nones(struct compiler *c, location loc) ADDOP_LOAD_CONST(c, loc, Py_None); ADDOP_LOAD_CONST(c, loc, Py_None); ADDOP_I(c, loc, CALL, 2); - return 1; + return SUCCESS; } static int @@ -1960,7 +2061,7 @@ compiler_add_yield_from(struct compiler *c, location loc, int await) ADDOP(c, loc, CLEANUP_THROW); USE_LABEL(c, exit); - return 1; + return SUCCESS; } static int @@ -1976,7 +2077,7 @@ compiler_pop_except_and_reraise(struct compiler *c, location loc) ADDOP_I(c, loc, COPY, 3); ADDOP(c, loc, POP_EXCEPT); ADDOP_I(c, loc, RERAISE, 1); - return 1; + return SUCCESS; } /* Unwind a frame block. 
If preserve_tos is true, the TOS before @@ -1993,7 +2094,7 @@ compiler_unwind_fblock(struct compiler *c, location *ploc, case EXCEPTION_HANDLER: case EXCEPTION_GROUP_HANDLER: case ASYNC_COMPREHENSION_GENERATOR: - return 1; + return SUCCESS; case FOR_LOOP: /* Pop the iterator */ @@ -2001,19 +2102,18 @@ compiler_unwind_fblock(struct compiler *c, location *ploc, ADDOP_I(c, *ploc, SWAP, 2); } ADDOP(c, *ploc, POP_TOP); - return 1; + return SUCCESS; case TRY_EXCEPT: ADDOP(c, *ploc, POP_BLOCK); - return 1; + return SUCCESS; case FINALLY_TRY: /* This POP_BLOCK gets the line number of the unwinding statement */ ADDOP(c, *ploc, POP_BLOCK); if (preserve_tos) { - if (!compiler_push_fblock(c, *ploc, POP_VALUE, NO_LABEL, NO_LABEL, NULL)) { - return 0; - } + RETURN_IF_ERROR( + compiler_push_fblock(c, *ploc, POP_VALUE, NO_LABEL, NO_LABEL, NULL)); } /* Emit the finally block */ VISIT_SEQ(c, stmt, info->fb_datum); @@ -2023,9 +2123,8 @@ compiler_unwind_fblock(struct compiler *c, location *ploc, /* The finally block should appear to execute after the * statement causing the unwinding, so make the unwinding * instruction artificial */ - UNSET_LOC(c); *ploc = NO_LOCATION; - return 1; + return SUCCESS; case FINALLY_END: if (preserve_tos) { @@ -2037,19 +2136,16 @@ compiler_unwind_fblock(struct compiler *c, location *ploc, } ADDOP(c, *ploc, POP_BLOCK); ADDOP(c, *ploc, POP_EXCEPT); - return 1; + return SUCCESS; case WITH: case ASYNC_WITH: - SET_LOC(c, (stmt_ty)info->fb_datum); *ploc = LOC((stmt_ty)info->fb_datum); ADDOP(c, *ploc, POP_BLOCK); if (preserve_tos) { ADDOP_I(c, *ploc, SWAP, 2); } - if(!compiler_call_exit_with_nones(c, *ploc)) { - return 0; - } + RETURN_IF_ERROR(compiler_call_exit_with_nones(c, *ploc)); if (info->fb_type == ASYNC_WITH) { ADDOP_I(c, *ploc, GET_AWAITABLE, 2); ADDOP_LOAD_CONST(c, *ploc, Py_None); @@ -2059,9 +2155,8 @@ compiler_unwind_fblock(struct compiler *c, location *ploc, /* The exit block should appear to execute after the * statement causing the unwinding, so make the unwinding * instruction artificial */ - UNSET_LOC(c); *ploc = NO_LOCATION; - return 1; + return SUCCESS; case HANDLER_CLEANUP: { if (info->fb_datum) { @@ -2074,17 +2169,17 @@ compiler_unwind_fblock(struct compiler *c, location *ploc, ADDOP(c, *ploc, POP_EXCEPT); if (info->fb_datum) { ADDOP_LOAD_CONST(c, *ploc, Py_None); - compiler_nameop(c, *ploc, info->fb_datum, Store); - compiler_nameop(c, *ploc, info->fb_datum, Del); + RETURN_IF_ERROR(compiler_nameop(c, *ploc, info->fb_datum, Store)); + RETURN_IF_ERROR(compiler_nameop(c, *ploc, info->fb_datum, Del)); } - return 1; + return SUCCESS; } case POP_VALUE: { if (preserve_tos) { ADDOP_I(c, *ploc, SWAP, 2); } ADDOP(c, *ploc, POP_TOP); - return 1; + return SUCCESS; } } Py_UNREACHABLE(); @@ -2096,7 +2191,7 @@ compiler_unwind_fblock_stack(struct compiler *c, location *ploc, int preserve_tos, struct fblockinfo **loop) { if (c->u->u_nfblocks == 0) { - return 1; + return SUCCESS; } struct fblockinfo *top = &c->u->u_fblock[c->u->u_nfblocks-1]; if (top->fb_type == EXCEPTION_GROUP_HANDLER) { @@ -2105,19 +2200,15 @@ compiler_unwind_fblock_stack(struct compiler *c, location *ploc, } if (loop != NULL && (top->fb_type == WHILE_LOOP || top->fb_type == FOR_LOOP)) { *loop = top; - return 1; + return SUCCESS; } struct fblockinfo copy = *top; c->u->u_nfblocks--; - if (!compiler_unwind_fblock(c, ploc, &copy, preserve_tos)) { - return 0; - } - if (!compiler_unwind_fblock_stack(c, ploc, preserve_tos, loop)) { - return 0; - } + RETURN_IF_ERROR(compiler_unwind_fblock(c, ploc, &copy, preserve_tos)); +
RETURN_IF_ERROR(compiler_unwind_fblock_stack(c, ploc, preserve_tos, loop)); c->u->u_fblock[c->u->u_nfblocks] = copy; c->u->u_nfblocks++; - return 1; + return SUCCESS; } /* Compile a sequence of statements, checking for a docstring @@ -2136,15 +2227,15 @@ compiler_body(struct compiler *c, location loc, asdl_stmt_seq *stmts) If body is empty, then lineno will be set later in assemble. */ if (c->u->u_scope_type == COMPILER_SCOPE_MODULE && asdl_seq_LEN(stmts)) { st = (stmt_ty)asdl_seq_GET(stmts, 0); - SET_LOC(c, st); loc = LOC(st); } /* Every annotated class and module should have __annotations__. */ if (find_ann(stmts)) { ADDOP(c, loc, SETUP_ANNOTATIONS); } - if (!asdl_seq_LEN(stmts)) - return 1; + if (!asdl_seq_LEN(stmts)) { + return SUCCESS; + } /* if not -OO mode, set docstring */ if (c->c_optimize < 2) { docstring = _PyAST_GetDocString(stmts); @@ -2153,32 +2244,29 @@ compiler_body(struct compiler *c, location loc, asdl_stmt_seq *stmts) st = (stmt_ty)asdl_seq_GET(stmts, 0); assert(st->kind == Expr_kind); VISIT(c, expr, st->v.Expr.value); - UNSET_LOC(c); - if (!compiler_nameop(c, NO_LOCATION, &_Py_ID(__doc__), Store)) - return 0; + RETURN_IF_ERROR(compiler_nameop(c, NO_LOCATION, &_Py_ID(__doc__), Store)); } } - for (; i < asdl_seq_LEN(stmts); i++) + for (; i < asdl_seq_LEN(stmts); i++) { VISIT(c, stmt, (stmt_ty)asdl_seq_GET(stmts, i)); - return 1; + } + return SUCCESS; } -static PyCodeObject * -compiler_mod(struct compiler *c, mod_ty mod) +static int +compiler_codegen(struct compiler *c, mod_ty mod) { - PyCodeObject *co; - int addNone = 1; _Py_DECLARE_STR(anon_module, "<module>"); - if (!compiler_enter_scope(c, &_Py_STR(anon_module), COMPILER_SCOPE_MODULE, - mod, 1)) { - return NULL; - } + RETURN_IF_ERROR( + compiler_enter_scope(c, &_Py_STR(anon_module), COMPILER_SCOPE_MODULE, + mod, 1)); + location loc = LOCATION(1, 1, 0, 0); switch (mod->kind) { case Module_kind: - if (!compiler_body(c, loc, mod->v.Module.body)) { + if (compiler_body(c, loc, mod->v.Module.body) < 0) { compiler_exit_scope(c); - return 0; + return ERROR; } break; case Interactive_kind: @@ -2190,15 +2278,24 @@ compiler_mod(struct compiler *c, mod_ty mod) break; case Expression_kind: VISIT_IN_SCOPE(c, expr, mod->v.Expression.body); - addNone = 0; break; default: PyErr_Format(PyExc_SystemError, "module kind %d should not be possible", mod->kind); - return 0; + return ERROR; + } + return SUCCESS; +} + +static PyCodeObject * +compiler_mod(struct compiler *c, mod_ty mod) +{ + int addNone = mod->kind != Expression_kind; + if (compiler_codegen(c, mod) < 0) { + return NULL; } - co = assemble(c, addNone); + PyCodeObject *co = assemble(c, addNone); compiler_exit_scope(c); return co; } @@ -2262,7 +2359,7 @@ compiler_make_closure(struct compiler *c, location loc, */ int reftype = get_ref_type(c, name); if (reftype == -1) { - return 0; + return ERROR; } int arg; if (reftype == CELL) { @@ -2285,7 +2382,7 @@ compiler_make_closure(struct compiler *c, location loc, co->co_name, freevars); Py_DECREF(freevars); - return 0; + return ERROR; } ADDOP_I(c, loc, LOAD_CLOSURE, arg); } @@ -2294,35 +2391,34 @@ compiler_make_closure(struct compiler *c, location loc, } ADDOP_LOAD_CONST(c, loc, (PyObject*)co); ADDOP_I(c, loc, MAKE_FUNCTION, flags); - return 1; + return SUCCESS; } static int compiler_decorators(struct compiler *c, asdl_expr_seq* decos) { - int i; - - if (!decos) - return 1; + if (!decos) { + return SUCCESS; + } - for (i = 0; i < asdl_seq_LEN(decos); i++) { + for (Py_ssize_t i = 0; i < asdl_seq_LEN(decos); i++) { VISIT(c, expr, 
(expr_ty)asdl_seq_GET(decos, i)); } - return 1; + return SUCCESS; } static int compiler_apply_decorators(struct compiler *c, asdl_expr_seq* decos) { - if (!decos) - return 1; + if (!decos) { + return SUCCESS; + } for (Py_ssize_t i = asdl_seq_LEN(decos) - 1; i > -1; i--) { - SET_LOC(c, (expr_ty)asdl_seq_GET(decos, i)); location loc = LOC((expr_ty)asdl_seq_GET(decos, i)); ADDOP_I(c, loc, CALL, 0); } - return 1; + return SUCCESS; } static int @@ -2331,7 +2427,7 @@ compiler_visit_kwonlydefaults(struct compiler *c, location loc, { /* Push a dict of keyword-only default values. - Return 0 on error, -1 if no dict pushed, 1 if a dict is pushed. + Return -1 on error, 0 if no dict pushed, 1 if a dict is pushed. */ int i; PyObject *keys = NULL; @@ -2348,7 +2444,7 @@ compiler_visit_kwonlydefaults(struct compiler *c, location loc, keys = PyList_New(1); if (keys == NULL) { Py_DECREF(mangled); - return 0; + return ERROR; } PyList_SET_ITEM(keys, 0, mangled); } @@ -2359,7 +2455,7 @@ compiler_visit_kwonlydefaults(struct compiler *c, location loc, goto error; } } - if (!compiler_visit_expr(c, default_)) { + if (compiler_visit_expr(c, default_) < 0) { goto error; } } @@ -2374,12 +2470,12 @@ compiler_visit_kwonlydefaults(struct compiler *c, location loc, return 1; } else { - return -1; + return 0; } error: Py_XDECREF(keys); - return 0; + return ERROR; } static int @@ -2387,7 +2483,7 @@ compiler_visit_annexpr(struct compiler *c, expr_ty annotation) { location loc = LOC(annotation); ADDOP_LOAD_CONST_NEW(c, loc, _PyAST_ExprAsUnicode(annotation)); - return 1; + return SUCCESS; } static int @@ -2395,16 +2491,16 @@ compiler_visit_argannotation(struct compiler *c, identifier id, expr_ty annotation, Py_ssize_t *annotations_len, location loc) { if (!annotation) { - return 1; + return SUCCESS; } PyObject *mangled = _Py_Mangle(c->u->u_private, id); if (!mangled) { - return 0; + return ERROR; } ADDOP_LOAD_CONST(c, loc, mangled); Py_DECREF(mangled); - if (c->c_future->ff_features & CO_FUTURE_ANNOTATIONS) { + if (c->c_future.ff_features & CO_FUTURE_ANNOTATIONS) { VISIT(c, annexpr, annotation); } else { @@ -2421,7 +2517,7 @@ compiler_visit_argannotation(struct compiler *c, identifier id, } } *annotations_len += 2; - return 1; + return SUCCESS; } static int @@ -2431,15 +2527,15 @@ compiler_visit_argannotations(struct compiler *c, asdl_arg_seq* args, int i; for (i = 0; i < asdl_seq_LEN(args); i++) { arg_ty arg = (arg_ty)asdl_seq_GET(args, i); - if (!compiler_visit_argannotation( + RETURN_IF_ERROR( + compiler_visit_argannotation( c, arg->arg, arg->annotation, annotations_len, - loc)) - return 0; + loc)); } - return 1; + return SUCCESS; } static int @@ -2449,36 +2545,40 @@ compiler_visit_annotations(struct compiler *c, location loc, /* Push arg annotation names and values. The expressions are evaluated out-of-order wrt the source code. - Return 0 on error, -1 if no annotations pushed, 1 if a annotations is pushed. + Return -1 on error, 0 if no annotations pushed, 1 if a annotations is pushed. 
*/ Py_ssize_t annotations_len = 0; - if (!compiler_visit_argannotations(c, args->args, &annotations_len, loc)) - return 0; - if (!compiler_visit_argannotations(c, args->posonlyargs, &annotations_len, loc)) - return 0; - if (args->vararg && args->vararg->annotation && - !compiler_visit_argannotation(c, args->vararg->arg, - args->vararg->annotation, &annotations_len, loc)) - return 0; - if (!compiler_visit_argannotations(c, args->kwonlyargs, &annotations_len, loc)) - return 0; - if (args->kwarg && args->kwarg->annotation && - !compiler_visit_argannotation(c, args->kwarg->arg, - args->kwarg->annotation, &annotations_len, loc)) - return 0; + RETURN_IF_ERROR( + compiler_visit_argannotations(c, args->args, &annotations_len, loc)); - if (!compiler_visit_argannotation(c, &_Py_ID(return), returns, - &annotations_len, loc)) { - return 0; + RETURN_IF_ERROR( + compiler_visit_argannotations(c, args->posonlyargs, &annotations_len, loc)); + + if (args->vararg && args->vararg->annotation) { + RETURN_IF_ERROR( + compiler_visit_argannotation(c, args->vararg->arg, + args->vararg->annotation, &annotations_len, loc)); + } + + RETURN_IF_ERROR( + compiler_visit_argannotations(c, args->kwonlyargs, &annotations_len, loc)); + + if (args->kwarg && args->kwarg->annotation) { + RETURN_IF_ERROR( + compiler_visit_argannotation(c, args->kwarg->arg, + args->kwarg->annotation, &annotations_len, loc)); } + RETURN_IF_ERROR( + compiler_visit_argannotation(c, &_Py_ID(return), returns, &annotations_len, loc)); + if (annotations_len) { ADDOP_I(c, loc, BUILD_TUPLE, annotations_len); return 1; } - return -1; + return 0; } static int @@ -2487,7 +2587,7 @@ compiler_visit_defaults(struct compiler *c, arguments_ty args, { VISIT_SEQ(c, expr, args->defaults); ADDOP_I(c, loc, BUILD_TUPLE, asdl_seq_LEN(args->defaults)); - return 1; + return SUCCESS; } static Py_ssize_t @@ -2496,47 +2596,45 @@ compiler_default_arguments(struct compiler *c, location loc, { Py_ssize_t funcflags = 0; if (args->defaults && asdl_seq_LEN(args->defaults) > 0) { - if (!compiler_visit_defaults(c, args, loc)) - return -1; + RETURN_IF_ERROR(compiler_visit_defaults(c, args, loc)); funcflags |= 0x01; } if (args->kwonlyargs) { int res = compiler_visit_kwonlydefaults(c, loc, args->kwonlyargs, args->kw_defaults); - if (res == 0) { - return -1; - } - else if (res > 0) { + RETURN_IF_ERROR(res); + if (res > 0) { funcflags |= 0x02; } } return funcflags; } -static int +static bool forbidden_name(struct compiler *c, location loc, identifier name, expr_context_ty ctx) { if (ctx == Store && _PyUnicode_EqualToASCIIString(name, "__debug__")) { compiler_error(c, loc, "cannot assign to __debug__"); - return 1; + return true; } if (ctx == Del && _PyUnicode_EqualToASCIIString(name, "__debug__")) { compiler_error(c, loc, "cannot delete __debug__"); - return 1; + return true; } - return 0; + return false; } static int compiler_check_debug_one_arg(struct compiler *c, arg_ty arg) { if (arg != NULL) { - if (forbidden_name(c, LOC(arg), arg->arg, Store)) - return 0; + if (forbidden_name(c, LOC(arg), arg->arg, Store)) { + return ERROR; + } } - return 1; + return SUCCESS; } static int @@ -2544,27 +2642,55 @@ compiler_check_debug_args_seq(struct compiler *c, asdl_arg_seq *args) { if (args != NULL) { for (Py_ssize_t i = 0, n = asdl_seq_LEN(args); i < n; i++) { - if (!compiler_check_debug_one_arg(c, asdl_seq_GET(args, i))) - return 0; + RETURN_IF_ERROR( + compiler_check_debug_one_arg(c, asdl_seq_GET(args, i))); } } - return 1; + return SUCCESS; } static int compiler_check_debug_args(struct 
compiler *c, arguments_ty args) { - if (!compiler_check_debug_args_seq(c, args->posonlyargs)) - return 0; - if (!compiler_check_debug_args_seq(c, args->args)) - return 0; - if (!compiler_check_debug_one_arg(c, args->vararg)) - return 0; - if (!compiler_check_debug_args_seq(c, args->kwonlyargs)) - return 0; - if (!compiler_check_debug_one_arg(c, args->kwarg)) - return 0; - return 1; + RETURN_IF_ERROR(compiler_check_debug_args_seq(c, args->posonlyargs)); + RETURN_IF_ERROR(compiler_check_debug_args_seq(c, args->args)); + RETURN_IF_ERROR(compiler_check_debug_one_arg(c, args->vararg)); + RETURN_IF_ERROR(compiler_check_debug_args_seq(c, args->kwonlyargs)); + RETURN_IF_ERROR(compiler_check_debug_one_arg(c, args->kwarg)); + return SUCCESS; +} + +static inline int +insert_instruction(basicblock *block, int pos, struct instr *instr) { + RETURN_IF_ERROR(basicblock_next_instr(block)); + for (int i = block->b_iused - 1; i > pos; i--) { + block->b_instr[i] = block->b_instr[i-1]; + } + block->b_instr[pos] = *instr; + return SUCCESS; +} + +static int +wrap_in_stopiteration_handler(struct compiler *c) +{ + NEW_JUMP_TARGET_LABEL(c, handler); + + /* Insert SETUP_CLEANUP at start */ + struct instr setup = { + .i_opcode = SETUP_CLEANUP, + .i_oparg = handler.id, + .i_loc = NO_LOCATION, + .i_target = NULL, + }; + RETURN_IF_ERROR( + insert_instruction(c->u->u_cfg_builder.g_entryblock, 0, &setup)); + + ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None); + ADDOP(c, NO_LOCATION, RETURN_VALUE); + USE_LABEL(c, handler); + ADDOP(c, NO_LOCATION, STOPITERATION_ERROR); + ADDOP_I(c, NO_LOCATION, RERAISE, 1); + return SUCCESS; } static int @@ -2604,11 +2730,8 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async) scope_type = COMPILER_SCOPE_FUNCTION; } - if (!compiler_check_debug_args(c, args)) - return 0; - - if (!compiler_decorators(c, decos)) - return 0; + RETURN_IF_ERROR(compiler_check_debug_args(c, args)); + RETURN_IF_ERROR(compiler_decorators(c, decos)); firstlineno = s->lineno; if (asdl_seq_LEN(decos)) { @@ -2618,19 +2741,16 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async) location loc = LOC(s); funcflags = compiler_default_arguments(c, loc, args); if (funcflags == -1) { - return 0; + return ERROR; } annotations = compiler_visit_annotations(c, loc, args, returns); - if (annotations == 0) { - return 0; - } - else if (annotations > 0) { + RETURN_IF_ERROR(annotations); + if (annotations > 0) { funcflags |= 0x04; } - if (!compiler_enter_scope(c, name, scope_type, (void *)s, firstlineno)) { - return 0; - } + RETURN_IF_ERROR( + compiler_enter_scope(c, name, scope_type, (void *)s, firstlineno)); /* if not -OO mode, add docstring */ if (c->c_optimize < 2) { @@ -2638,7 +2758,7 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async) } if (compiler_add_const(c, docstring ? docstring : Py_None) < 0) { compiler_exit_scope(c); - return 0; + return ERROR; } c->u->u_argcount = asdl_seq_LEN(args->args); @@ -2647,25 +2767,29 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async) for (i = docstring ? 
1 : 0; i < asdl_seq_LEN(body); i++) { VISIT_IN_SCOPE(c, stmt, (stmt_ty)asdl_seq_GET(body, i)); } + if (c->u->u_ste->ste_coroutine || c->u->u_ste->ste_generator) { + if (wrap_in_stopiteration_handler(c) < 0) { + compiler_exit_scope(c); + return ERROR; + } + } co = assemble(c, 1); - qualname = c->u->u_qualname; - Py_INCREF(qualname); + qualname = Py_NewRef(c->u->u_qualname); compiler_exit_scope(c); if (co == NULL) { Py_XDECREF(qualname); Py_XDECREF(co); - return 0; + return ERROR; } - if (!compiler_make_closure(c, loc, co, funcflags, qualname)) { + if (compiler_make_closure(c, loc, co, funcflags, qualname) < 0) { Py_DECREF(qualname); Py_DECREF(co); - return 0; + return ERROR; } Py_DECREF(qualname); Py_DECREF(co); - if (!compiler_apply_decorators(c, decos)) - return 0; + RETURN_IF_ERROR(compiler_apply_decorators(c, decos)); return compiler_nameop(c, loc, name, Store); } @@ -2676,8 +2800,7 @@ compiler_class(struct compiler *c, stmt_ty s) int i, firstlineno; asdl_expr_seq *decos = s->v.ClassDef.decorator_list; - if (!compiler_decorators(c, decos)) - return 0; + RETURN_IF_ERROR(compiler_decorators(c, decos)); firstlineno = s->lineno; if (asdl_seq_LEN(decos)) { @@ -2695,53 +2818,51 @@ compiler_class(struct compiler *c, stmt_ty s) This borrows from compiler_call. */ /* 1. compile the class body into a code object */ - if (!compiler_enter_scope(c, s->v.ClassDef.name, - COMPILER_SCOPE_CLASS, (void *)s, firstlineno)) { - return 0; - } + RETURN_IF_ERROR( + compiler_enter_scope(c, s->v.ClassDef.name, + COMPILER_SCOPE_CLASS, (void *)s, firstlineno)); + /* this block represents what we do in the new scope */ { location loc = LOCATION(firstlineno, firstlineno, 0, 0); /* use the class name for name mangling */ - Py_INCREF(s->v.ClassDef.name); - Py_XSETREF(c->u->u_private, s->v.ClassDef.name); + Py_XSETREF(c->u->u_private, Py_NewRef(s->v.ClassDef.name)); /* load (global) __name__ ... */ - if (!compiler_nameop(c, loc, &_Py_ID(__name__), Load)) { + if (compiler_nameop(c, loc, &_Py_ID(__name__), Load) < 0) { compiler_exit_scope(c); - return 0; + return ERROR; } /* ... 
and store it as __module__ */ - if (!compiler_nameop(c, loc, &_Py_ID(__module__), Store)) { + if (compiler_nameop(c, loc, &_Py_ID(__module__), Store) < 0) { compiler_exit_scope(c); - return 0; + return ERROR; } assert(c->u->u_qualname); ADDOP_LOAD_CONST(c, loc, c->u->u_qualname); - if (!compiler_nameop(c, loc, &_Py_ID(__qualname__), Store)) { + if (compiler_nameop(c, loc, &_Py_ID(__qualname__), Store) < 0) { compiler_exit_scope(c); - return 0; + return ERROR; } /* compile the body proper */ - if (!compiler_body(c, loc, s->v.ClassDef.body)) { + if (compiler_body(c, loc, s->v.ClassDef.body) < 0) { compiler_exit_scope(c); - return 0; + return ERROR; } /* The following code is artificial */ - UNSET_LOC(c); /* Return __classcell__ if it is referenced, otherwise return None */ if (c->u->u_ste->ste_needs_class_closure) { /* Store __classcell__ into class namespace & return it */ i = compiler_lookup_arg(c->u->u_cellvars, &_Py_ID(__class__)); if (i < 0) { compiler_exit_scope(c); - return 0; + return ERROR; } assert(i == 0); ADDOP_I(c, NO_LOCATION, LOAD_CLOSURE, i); ADDOP_I(c, NO_LOCATION, COPY, 1); - if (!compiler_nameop(c, NO_LOCATION, &_Py_ID(__classcell__), Store)) { + if (compiler_nameop(c, NO_LOCATION, &_Py_ID(__classcell__), Store) < 0) { compiler_exit_scope(c); - return 0; + return ERROR; } } else { @@ -2755,8 +2876,9 @@ compiler_class(struct compiler *c, stmt_ty s) } /* leave the new scope */ compiler_exit_scope(c); - if (co == NULL) - return 0; + if (co == NULL) { + return ERROR; + } location loc = LOC(s); /* 2. load the 'build_class' function */ @@ -2764,9 +2886,9 @@ compiler_class(struct compiler *c, stmt_ty s) ADDOP(c, loc, LOAD_BUILD_CLASS); /* 3. load a function (or closure) made from the code object */ - if (!compiler_make_closure(c, loc, co, 0, NULL)) { + if (compiler_make_closure(c, loc, co, 0, NULL) < 0) { Py_DECREF(co); - return 0; + return ERROR; } Py_DECREF(co); @@ -2774,27 +2896,25 @@ compiler_class(struct compiler *c, stmt_ty s) ADDOP_LOAD_CONST(c, loc, s->v.ClassDef.name); /* 5. generate the rest of the code for the call */ - if (!compiler_call_helper(c, loc, 2, - s->v.ClassDef.bases, - s->v.ClassDef.keywords)) - return 0; + RETURN_IF_ERROR(compiler_call_helper(c, loc, 2, + s->v.ClassDef.bases, + s->v.ClassDef.keywords)); + /* 6. apply decorators */ - if (!compiler_apply_decorators(c, decos)) - return 0; + RETURN_IF_ERROR(compiler_apply_decorators(c, decos)); /* 7. store into <name> */ - if (!compiler_nameop(c, loc, s->v.ClassDef.name, Store)) - return 0; - return 1; + RETURN_IF_ERROR(compiler_nameop(c, loc, s->v.ClassDef.name, Store)); + return SUCCESS; } -/* Return 0 if the expression is a constant value except named singletons. - Return 1 otherwise. */ -static int +/* Return false if the expression is a constant value except named singletons. + Return true otherwise. */ +static bool check_is_arg(expr_ty e) { if (e->kind != Constant_kind) { - return 1; + return true; } PyObject *value = e->v.Constant.value; return (value == Py_None @@ -2803,19 +2923,18 @@ check_is_arg(expr_ty e) || value == Py_Ellipsis); } -/* Check operands of identity chacks ("is" and "is not"). +/* Check operands of identity checks ("is" and "is not"). Emit a warning if any operand is a constant except named singletons. - Return 0 on error. 
*/ static int check_compare(struct compiler *c, expr_ty e) { Py_ssize_t i, n; - int left = check_is_arg(e->v.Compare.left); + bool left = check_is_arg(e->v.Compare.left); n = asdl_seq_LEN(e->v.Compare.ops); for (i = 0; i < n; i++) { cmpop_ty op = (cmpop_ty)asdl_seq_GET(e->v.Compare.ops, i); - int right = check_is_arg((expr_ty)asdl_seq_GET(e->v.Compare.comparators, i)); + bool right = check_is_arg((expr_ty)asdl_seq_GET(e->v.Compare.comparators, i)); if (op == Is || op == IsNot) { if (!right || !left) { const char *msg = (op == Is) @@ -2826,7 +2945,7 @@ check_compare(struct compiler *c, expr_ty e) } left = right; } - return 1; + return SUCCESS; } static int compiler_addcompare(struct compiler *c, location loc, @@ -2854,21 +2973,21 @@ static int compiler_addcompare(struct compiler *c, location loc, break; case Is: ADDOP_I(c, loc, IS_OP, 0); - return 1; + return SUCCESS; case IsNot: ADDOP_I(c, loc, IS_OP, 1); - return 1; + return SUCCESS; case In: ADDOP_I(c, loc, CONTAINS_OP, 0); - return 1; + return SUCCESS; case NotIn: ADDOP_I(c, loc, CONTAINS_OP, 1); - return 1; + return SUCCESS; default: Py_UNREACHABLE(); } ADDOP_I(c, loc, COMPARE_OP, cmp); - return 1; + return SUCCESS; } @@ -2895,43 +3014,36 @@ compiler_jump_if(struct compiler *c, location loc, next2 = new_next2; } for (i = 0; i < n; ++i) { - if (!compiler_jump_if(c, loc, (expr_ty)asdl_seq_GET(s, i), next2, cond2)) { - return 0; - } - } - if (!compiler_jump_if(c, loc, (expr_ty)asdl_seq_GET(s, n), next, cond)) { - return 0; + RETURN_IF_ERROR( + compiler_jump_if(c, loc, (expr_ty)asdl_seq_GET(s, i), next2, cond2)); } + RETURN_IF_ERROR( + compiler_jump_if(c, loc, (expr_ty)asdl_seq_GET(s, n), next, cond)); if (!SAME_LABEL(next2, next)) { USE_LABEL(c, next2); } - return 1; + return SUCCESS; } case IfExp_kind: { NEW_JUMP_TARGET_LABEL(c, end); NEW_JUMP_TARGET_LABEL(c, next2); - if (!compiler_jump_if(c, loc, e->v.IfExp.test, next2, 0)) { - return 0; - } - if (!compiler_jump_if(c, loc, e->v.IfExp.body, next, cond)) { - return 0; - } + RETURN_IF_ERROR( + compiler_jump_if(c, loc, e->v.IfExp.test, next2, 0)); + RETURN_IF_ERROR( + compiler_jump_if(c, loc, e->v.IfExp.body, next, cond)); ADDOP_JUMP(c, NO_LOCATION, JUMP, end); USE_LABEL(c, next2); - if (!compiler_jump_if(c, loc, e->v.IfExp.orelse, next, cond)) { - return 0; - } + RETURN_IF_ERROR( + compiler_jump_if(c, loc, e->v.IfExp.orelse, next, cond)); USE_LABEL(c, end); - return 1; + return SUCCESS; } case Compare_kind: { Py_ssize_t n = asdl_seq_LEN(e->v.Compare.ops) - 1; if (n > 0) { - if (!check_compare(c, e)) { - return 0; - } + RETURN_IF_ERROR(check_compare(c, e)); NEW_JUMP_TARGET_LABEL(c, cleanup); VISIT(c, expr, e->v.Compare.left); for (Py_ssize_t i = 0; i < n; i++) { @@ -2955,7 +3067,7 @@ compiler_jump_if(struct compiler *c, location loc, } USE_LABEL(c, end); - return 1; + return SUCCESS; } /* fallback to general implementation */ break; @@ -2968,7 +3080,7 @@ compiler_jump_if(struct compiler *c, location loc, /* general implementation */ VISIT(c, expr, e); ADDOP_JUMP(c, LOC(e), cond ? 
POP_JUMP_IF_TRUE : POP_JUMP_IF_FALSE, next); - return 1; + return SUCCESS; } static int @@ -2978,9 +3090,9 @@ compiler_ifexp(struct compiler *c, expr_ty e) NEW_JUMP_TARGET_LABEL(c, end); NEW_JUMP_TARGET_LABEL(c, next); - if (!compiler_jump_if(c, LOC(e), e->v.IfExp.test, next, 0)) { - return 0; - } + RETURN_IF_ERROR( + compiler_jump_if(c, LOC(e), e->v.IfExp.test, next, 0)); + VISIT(c, expr, e->v.IfExp.body); ADDOP_JUMP(c, NO_LOCATION, JUMP, end); @@ -2988,7 +3100,7 @@ compiler_ifexp(struct compiler *c, expr_ty e) VISIT(c, expr, e->v.IfExp.orelse); USE_LABEL(c, end); - return 1; + return SUCCESS; } static int @@ -3000,8 +3112,7 @@ compiler_lambda(struct compiler *c, expr_ty e) arguments_ty args = e->v.Lambda.args; assert(e->kind == Lambda_kind); - if (!compiler_check_debug_args(c, args)) - return 0; + RETURN_IF_ERROR(compiler_check_debug_args(c, args)); location loc = LOC(e); funcflags = compiler_default_arguments(c, loc, args); @@ -3010,14 +3121,13 @@ compiler_lambda(struct compiler *c, expr_ty e) } _Py_DECLARE_STR(anon_lambda, "<lambda>"); - if (!compiler_enter_scope(c, &_Py_STR(anon_lambda), COMPILER_SCOPE_LAMBDA, - (void *)e, e->lineno)) { - return 0; - } + RETURN_IF_ERROR( + compiler_enter_scope(c, &_Py_STR(anon_lambda), COMPILER_SCOPE_LAMBDA, + (void *)e, e->lineno)); + /* Make None the first constant, so the lambda can't have a docstring. */ - if (compiler_add_const(c, Py_None) < 0) - return 0; + RETURN_IF_ERROR(compiler_add_const(c, Py_None)); c->u->u_argcount = asdl_seq_LEN(args->args); c->u->u_posonlyargcount = asdl_seq_LEN(args->posonlyargs); @@ -3031,23 +3141,22 @@ compiler_lambda(struct compiler *c, expr_ty e) ADDOP_IN_SCOPE(c, loc, RETURN_VALUE); co = assemble(c, 1); } - qualname = c->u->u_qualname; - Py_INCREF(qualname); + qualname = Py_NewRef(c->u->u_qualname); compiler_exit_scope(c); if (co == NULL) { Py_DECREF(qualname); - return 0; + return ERROR; } - if (!compiler_make_closure(c, loc, co, funcflags, qualname)) { + if (compiler_make_closure(c, loc, co, funcflags, qualname) < 0) { Py_DECREF(qualname); Py_DECREF(co); - return 0; + return ERROR; } Py_DECREF(qualname); Py_DECREF(co); - return 1; + return SUCCESS; } static int @@ -3063,9 +3172,9 @@ compiler_if(struct compiler *c, stmt_ty s) else { next = end; } - if (!compiler_jump_if(c, LOC(s), s->v.If.test, next, 0)) { - return 0; - } + RETURN_IF_ERROR( + compiler_jump_if(c, LOC(s), s->v.If.test, next, 0)); + VISIT_SEQ(c, stmt, s->v.If.body); if (asdl_seq_LEN(s->v.If.orelse)) { ADDOP_JUMP(c, NO_LOCATION, JUMP, end); @@ -3075,7 +3184,7 @@ compiler_if(struct compiler *c, stmt_ty s) } USE_LABEL(c, end); - return 1; + return SUCCESS; } static int @@ -3087,9 +3196,8 @@ compiler_for(struct compiler *c, stmt_ty s) NEW_JUMP_TARGET_LABEL(c, cleanup); NEW_JUMP_TARGET_LABEL(c, end); - if (!compiler_push_fblock(c, loc, FOR_LOOP, start, end, NULL)) { - return 0; - } + RETURN_IF_ERROR(compiler_push_fblock(c, loc, FOR_LOOP, start, end, NULL)); + VISIT(c, expr, s->v.For.iter); ADDOP(c, loc, GET_ITER); @@ -3100,7 +3208,6 @@ compiler_for(struct compiler *c, stmt_ty s) VISIT(c, expr, s->v.For.target); VISIT_SEQ(c, stmt, s->v.For.body); /* Mark jump as artificial */ - UNSET_LOC(c); ADDOP_JUMP(c, NO_LOCATION, JUMP, start); USE_LABEL(c, cleanup); @@ -3111,7 +3218,7 @@ compiler_for(struct compiler *c, stmt_ty s) VISIT_SEQ(c, stmt, s->v.For.orelse); USE_LABEL(c, end); - return 1; + return SUCCESS; } @@ -3133,9 +3240,8 @@ compiler_async_for(struct compiler *c, stmt_ty s) ADDOP(c, loc, GET_AITER); USE_LABEL(c, start); - if (!compiler_push_fblock(c, 
loc, FOR_LOOP, start, end, NULL)) { - return 0; - } + RETURN_IF_ERROR(compiler_push_fblock(c, loc, FOR_LOOP, start, end, NULL)); + /* SETUP_FINALLY to guard the __anext__ call */ ADDOP_JUMP(c, loc, SETUP_FINALLY, except); ADDOP(c, loc, GET_ANEXT); @@ -3147,7 +3253,6 @@ compiler_async_for(struct compiler *c, stmt_ty s) VISIT(c, expr, s->v.AsyncFor.target); VISIT_SEQ(c, stmt, s->v.AsyncFor.body); /* Mark jump as artificial */ - UNSET_LOC(c); ADDOP_JUMP(c, NO_LOCATION, JUMP, start); compiler_pop_fblock(c, FOR_LOOP, start); @@ -3157,7 +3262,6 @@ compiler_async_for(struct compiler *c, stmt_ty s) /* Use same line number as the iterator, * as the END_ASYNC_FOR succeeds the `for`, not the body. */ - SET_LOC(c, s->v.AsyncFor.iter); loc = LOC(s->v.AsyncFor.iter); ADDOP(c, loc, END_ASYNC_FOR); @@ -3165,7 +3269,7 @@ compiler_async_for(struct compiler *c, stmt_ty s) VISIT_SEQ(c, stmt, s->v.For.orelse); USE_LABEL(c, end); - return 1; + return SUCCESS; } static int @@ -3177,18 +3281,13 @@ compiler_while(struct compiler *c, stmt_ty s) NEW_JUMP_TARGET_LABEL(c, anchor); USE_LABEL(c, loop); - if (!compiler_push_fblock(c, LOC(s), WHILE_LOOP, loop, end, NULL)) { - return 0; - } - if (!compiler_jump_if(c, LOC(s), s->v.While.test, anchor, 0)) { - return 0; - } + + RETURN_IF_ERROR(compiler_push_fblock(c, LOC(s), WHILE_LOOP, loop, end, NULL)); + RETURN_IF_ERROR(compiler_jump_if(c, LOC(s), s->v.While.test, anchor, 0)); USE_LABEL(c, body); VISIT_SEQ(c, stmt, s->v.While.body); - if (!compiler_jump_if(c, LOC(s), s->v.While.test, body, 1)) { - return 0; - } + RETURN_IF_ERROR(compiler_jump_if(c, LOC(s), s->v.While.test, body, 1)); compiler_pop_fblock(c, WHILE_LOOP, loop); @@ -3198,7 +3297,7 @@ compiler_while(struct compiler *c, stmt_ty s) } USE_LABEL(c, end); - return 1; + return SUCCESS; } static int @@ -3207,13 +3306,13 @@ compiler_return(struct compiler *c, stmt_ty s) location loc = LOC(s); int preserve_tos = ((s->v.Return.value != NULL) && (s->v.Return.value->kind != Constant_kind)); - if (c->u->u_ste->ste_type != FunctionBlock) + if (c->u->u_ste->ste_type != FunctionBlock) { return compiler_error(c, loc, "'return' outside function"); + } if (s->v.Return.value != NULL && c->u->u_ste->ste_coroutine && c->u->u_ste->ste_generator) { - return compiler_error( - c, loc, "'return' with value in async generator"); + return compiler_error(c, loc, "'return' with value in async generator"); } if (preserve_tos) { @@ -3221,19 +3320,16 @@ compiler_return(struct compiler *c, stmt_ty s) } else { /* Emit instruction with line number for return value */ if (s->v.Return.value != NULL) { - SET_LOC(c, s->v.Return.value); loc = LOC(s->v.Return.value); ADDOP(c, loc, NOP); } } if (s->v.Return.value == NULL || s->v.Return.value->lineno != s->lineno) { - SET_LOC(c, s); loc = LOC(s); ADDOP(c, loc, NOP); } - if (!compiler_unwind_fblock_stack(c, &loc, preserve_tos, NULL)) - return 0; + RETURN_IF_ERROR(compiler_unwind_fblock_stack(c, &loc, preserve_tos, NULL)); if (s->v.Return.value == NULL) { ADDOP_LOAD_CONST(c, loc, Py_None); } @@ -3242,7 +3338,7 @@ compiler_return(struct compiler *c, stmt_ty s) } ADDOP(c, loc, RETURN_VALUE); - return 1; + return SUCCESS; } static int @@ -3251,17 +3347,13 @@ compiler_break(struct compiler *c, location loc) struct fblockinfo *loop = NULL; /* Emit instruction with line number */ ADDOP(c, loc, NOP); - if (!compiler_unwind_fblock_stack(c, &loc, 0, &loop)) { - return 0; - } + RETURN_IF_ERROR(compiler_unwind_fblock_stack(c, &loc, 0, &loop)); if (loop == NULL) { return compiler_error(c, loc, "'break' outside loop"); 
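/*
 * Editor's sketch, not part of the patch: the hunks in this file replace the
 * old truthy convention (return 1 on success, 0 on failure) with the
 * SUCCESS / ERROR values and the RETURN_IF_ERROR propagation macro. The real
 * definitions live elsewhere in compile.c; the values and macro body below
 * are an assumption, used only so the standalone illustration compiles.
 */
#include <stdio.h>

#define SUCCESS 0
#define ERROR  -1

/* Leave the calling function as soon as a sub-step reports -1. */
#define RETURN_IF_ERROR(X)  \
    do {                    \
        if ((X) == -1) {    \
            return ERROR;   \
        }                   \
    } while (0)

static int step(int fail) { return fail ? ERROR : SUCCESS; }

static int run(void)
{
    RETURN_IF_ERROR(step(0));   /* SUCCESS: execution continues */
    RETURN_IF_ERROR(step(1));   /* ERROR: run() returns -1 here */
    return SUCCESS;             /* not reached in this illustration */
}

int main(void)
{
    printf("run() -> %d\n", run());   /* prints "run() -> -1" */
    return 0;
}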
} - if (!compiler_unwind_fblock(c, &loc, loop, 0)) { - return 0; - } + RETURN_IF_ERROR(compiler_unwind_fblock(c, &loc, loop, 0)); ADDOP_JUMP(c, loc, JUMP, loop->fb_exit); - return 1; + return SUCCESS; } static int @@ -3270,14 +3362,12 @@ compiler_continue(struct compiler *c, location loc) struct fblockinfo *loop = NULL; /* Emit instruction with line number */ ADDOP(c, loc, NOP); - if (!compiler_unwind_fblock_stack(c, &loc, 0, &loop)) { - return 0; - } + RETURN_IF_ERROR(compiler_unwind_fblock_stack(c, &loc, 0, &loop)); if (loop == NULL) { return compiler_error(c, loc, "'continue' not properly in loop"); } ADDOP_JUMP(c, loc, JUMP, loop->fb_block); - return 1; + return SUCCESS; } @@ -3336,11 +3426,12 @@ compiler_try_finally(struct compiler *c, stmt_ty s) ADDOP_JUMP(c, loc, SETUP_FINALLY, end); USE_LABEL(c, body); - if (!compiler_push_fblock(c, loc, FINALLY_TRY, body, end, s->v.Try.finalbody)) - return 0; + RETURN_IF_ERROR( + compiler_push_fblock(c, loc, FINALLY_TRY, body, end, + s->v.Try.finalbody)); + if (s->v.Try.handlers && asdl_seq_LEN(s->v.Try.handlers)) { - if (!compiler_try_except(c, s)) - return 0; + RETURN_IF_ERROR(compiler_try_except(c, s)); } else { VISIT_SEQ(c, stmt, s->v.Try.body); @@ -3354,12 +3445,11 @@ compiler_try_finally(struct compiler *c, stmt_ty s) USE_LABEL(c, end); - UNSET_LOC(c); loc = NO_LOCATION; ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup); ADDOP(c, loc, PUSH_EXC_INFO); - if (!compiler_push_fblock(c, loc, FINALLY_END, end, NO_LABEL, NULL)) - return 0; + RETURN_IF_ERROR( + compiler_push_fblock(c, loc, FINALLY_END, end, NO_LABEL, NULL)); VISIT_SEQ(c, stmt, s->v.Try.finalbody); loc = location_of_last_executing_statement(s->v.Try.finalbody); compiler_pop_fblock(c, FINALLY_END, end); @@ -3370,7 +3460,7 @@ compiler_try_finally(struct compiler *c, stmt_ty s) POP_EXCEPT_AND_RERAISE(c, loc); USE_LABEL(c, exit); - return 1; + return SUCCESS; } static int @@ -3386,13 +3476,12 @@ compiler_try_star_finally(struct compiler *c, stmt_ty s) ADDOP_JUMP(c, loc, SETUP_FINALLY, end); USE_LABEL(c, body); - if (!compiler_push_fblock(c, loc, FINALLY_TRY, body, end, s->v.TryStar.finalbody)) { - return 0; - } + RETURN_IF_ERROR( + compiler_push_fblock(c, loc, FINALLY_TRY, body, end, + s->v.TryStar.finalbody)); + if (s->v.TryStar.handlers && asdl_seq_LEN(s->v.TryStar.handlers)) { - if (!compiler_try_star_except(c, s)) { - return 0; - } + RETURN_IF_ERROR(compiler_try_star_except(c, s)); } else { VISIT_SEQ(c, stmt, s->v.TryStar.body); @@ -3406,13 +3495,12 @@ compiler_try_star_finally(struct compiler *c, stmt_ty s) /* `finally` block */ USE_LABEL(c, end); - UNSET_LOC(c); loc = NO_LOCATION; ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup); ADDOP(c, loc, PUSH_EXC_INFO); - if (!compiler_push_fblock(c, loc, FINALLY_END, end, NO_LABEL, NULL)) { - return 0; - } + RETURN_IF_ERROR( + compiler_push_fblock(c, loc, FINALLY_END, end, NO_LABEL, NULL)); + VISIT_SEQ(c, stmt, s->v.TryStar.finalbody); loc = location_of_last_executing_statement(s->v.Try.finalbody); @@ -3423,7 +3511,7 @@ compiler_try_star_finally(struct compiler *c, stmt_ty s) POP_EXCEPT_AND_RERAISE(c, loc); USE_LABEL(c, exit); - return 1; + return SUCCESS; } @@ -3469,8 +3557,8 @@ compiler_try_except(struct compiler *c, stmt_ty s) ADDOP_JUMP(c, loc, SETUP_FINALLY, except); USE_LABEL(c, body); - if (!compiler_push_fblock(c, loc, TRY_EXCEPT, body, NO_LABEL, NULL)) - return 0; + RETURN_IF_ERROR( + compiler_push_fblock(c, loc, TRY_EXCEPT, body, NO_LABEL, NULL)); VISIT_SEQ(c, stmt, s->v.Try.body); compiler_pop_fblock(c, TRY_EXCEPT, body); ADDOP(c, NO_LOCATION, 
POP_BLOCK); @@ -3482,16 +3570,16 @@ compiler_try_except(struct compiler *c, stmt_ty s) USE_LABEL(c, except); - UNSET_LOC(c); ADDOP_JUMP(c, NO_LOCATION, SETUP_CLEANUP, cleanup); ADDOP(c, NO_LOCATION, PUSH_EXC_INFO); + /* Runtime will push a block here, so we need to account for that */ - if (!compiler_push_fblock(c, loc, EXCEPTION_HANDLER, NO_LABEL, NO_LABEL, NULL)) - return 0; + RETURN_IF_ERROR( + compiler_push_fblock(c, loc, EXCEPTION_HANDLER, NO_LABEL, NO_LABEL, NULL)); + for (i = 0; i < n; i++) { excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET( s->v.Try.handlers, i); - SET_LOC(c, handler); location loc = LOC(handler); if (!handler->v.ExceptHandler.type && i < n-1) { return compiler_error(c, loc, "default 'except:' must be last"); @@ -3507,7 +3595,8 @@ compiler_try_except(struct compiler *c, stmt_ty s) NEW_JUMP_TARGET_LABEL(c, cleanup_end); NEW_JUMP_TARGET_LABEL(c, cleanup_body); - compiler_nameop(c, loc, handler->v.ExceptHandler.name, Store); + RETURN_IF_ERROR( + compiler_nameop(c, loc, handler->v.ExceptHandler.name, Store)); /* try: @@ -3524,33 +3613,33 @@ compiler_try_except(struct compiler *c, stmt_ty s) ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup_end); USE_LABEL(c, cleanup_body); - if (!compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body, - NO_LABEL, handler->v.ExceptHandler.name)) { - return 0; - } + RETURN_IF_ERROR( + compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body, + NO_LABEL, handler->v.ExceptHandler.name)); /* second # body */ VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body); compiler_pop_fblock(c, HANDLER_CLEANUP, cleanup_body); /* name = None; del name; # Mark as artificial */ - UNSET_LOC(c); ADDOP(c, NO_LOCATION, POP_BLOCK); ADDOP(c, NO_LOCATION, POP_BLOCK); ADDOP(c, NO_LOCATION, POP_EXCEPT); ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None); - compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store); - compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del); + RETURN_IF_ERROR( + compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store)); + RETURN_IF_ERROR( + compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del)); ADDOP_JUMP(c, NO_LOCATION, JUMP, end); /* except: */ USE_LABEL(c, cleanup_end); - /* name = None; del name; # Mark as artificial */ - UNSET_LOC(c); - + /* name = None; del name; # artificial */ ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None); - compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store); - compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del); + RETURN_IF_ERROR( + compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store)); + RETURN_IF_ERROR( + compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del)); ADDOP_I(c, NO_LOCATION, RERAISE, 1); } @@ -3560,11 +3649,12 @@ compiler_try_except(struct compiler *c, stmt_ty s) ADDOP(c, loc, POP_TOP); /* exc_value */ USE_LABEL(c, cleanup_body); - if (!compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body, NO_LABEL, NULL)) - return 0; + RETURN_IF_ERROR( + compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body, + NO_LABEL, NULL)); + VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body); compiler_pop_fblock(c, HANDLER_CLEANUP, cleanup_body); - UNSET_LOC(c); ADDOP(c, NO_LOCATION, POP_BLOCK); ADDOP(c, NO_LOCATION, POP_EXCEPT); ADDOP_JUMP(c, NO_LOCATION, JUMP, end); @@ -3572,8 +3662,7 @@ compiler_try_except(struct compiler *c, stmt_ty s) USE_LABEL(c, except); } - /* Mark as artificial */ - UNSET_LOC(c); + /* artificial */ compiler_pop_fblock(c, EXCEPTION_HANDLER, NO_LABEL); ADDOP_I(c, NO_LOCATION, 
RERAISE, 0); @@ -3581,7 +3670,7 @@ compiler_try_except(struct compiler *c, stmt_ty s) POP_EXCEPT_AND_RERAISE(c, NO_LOCATION); USE_LABEL(c, end); - return 1; + return SUCCESS; } /* @@ -3648,9 +3737,8 @@ compiler_try_star_except(struct compiler *c, stmt_ty s) ADDOP_JUMP(c, loc, SETUP_FINALLY, except); USE_LABEL(c, body); - if (!compiler_push_fblock(c, loc, TRY_EXCEPT, body, NO_LABEL, NULL)) { - return 0; - } + RETURN_IF_ERROR( + compiler_push_fblock(c, loc, TRY_EXCEPT, body, NO_LABEL, NULL)); VISIT_SEQ(c, stmt, s->v.TryStar.body); compiler_pop_fblock(c, TRY_EXCEPT, body); ADDOP(c, NO_LOCATION, POP_BLOCK); @@ -3659,18 +3747,17 @@ compiler_try_star_except(struct compiler *c, stmt_ty s) USE_LABEL(c, except); - UNSET_LOC(c); ADDOP_JUMP(c, NO_LOCATION, SETUP_CLEANUP, cleanup); ADDOP(c, NO_LOCATION, PUSH_EXC_INFO); + /* Runtime will push a block here, so we need to account for that */ - if (!compiler_push_fblock(c, loc, EXCEPTION_GROUP_HANDLER, - NO_LABEL, NO_LABEL, "except handler")) { - return 0; - } + RETURN_IF_ERROR( + compiler_push_fblock(c, loc, EXCEPTION_GROUP_HANDLER, + NO_LABEL, NO_LABEL, "except handler")); + for (Py_ssize_t i = 0; i < n; i++) { excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET( s->v.TryStar.handlers, i); - SET_LOC(c, handler); location loc = LOC(handler); NEW_JUMP_TARGET_LABEL(c, next_except); except = next_except; @@ -3707,7 +3794,8 @@ compiler_try_star_except(struct compiler *c, stmt_ty s) NEW_JUMP_TARGET_LABEL(c, cleanup_body); if (handler->v.ExceptHandler.name) { - compiler_nameop(c, loc, handler->v.ExceptHandler.name, Store); + RETURN_IF_ERROR( + compiler_nameop(c, loc, handler->v.ExceptHandler.name, Store)); } else { ADDOP(c, loc, POP_TOP); // match @@ -3727,33 +3815,34 @@ compiler_try_star_except(struct compiler *c, stmt_ty s) ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup_end); USE_LABEL(c, cleanup_body); - if (!compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body, NO_LABEL, handler->v.ExceptHandler.name)) { - return 0; - } + RETURN_IF_ERROR( + compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body, + NO_LABEL, handler->v.ExceptHandler.name)); /* second # body */ VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body); compiler_pop_fblock(c, HANDLER_CLEANUP, cleanup_body); - /* name = None; del name; # Mark as artificial */ - UNSET_LOC(c); + /* name = None; del name; # artificial */ ADDOP(c, NO_LOCATION, POP_BLOCK); if (handler->v.ExceptHandler.name) { ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None); - compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store); - compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del); + RETURN_IF_ERROR( + compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store)); + RETURN_IF_ERROR( + compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del)); } ADDOP_JUMP(c, NO_LOCATION, JUMP, except); /* except: */ USE_LABEL(c, cleanup_end); - /* name = None; del name; # Mark as artificial */ - UNSET_LOC(c); - + /* name = None; del name; # artificial */ if (handler->v.ExceptHandler.name) { ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None); - compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store); - compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del); + RETURN_IF_ERROR( + compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store)); + RETURN_IF_ERROR( + compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del)); } /* add exception raised to the res list */ @@ -3769,8 +3858,7 @@ compiler_try_star_except(struct compiler *c, stmt_ty s) ADDOP_JUMP(c, 
NO_LOCATION, JUMP, reraise_star); } } - /* Mark as artificial */ - UNSET_LOC(c); + /* artificial */ compiler_pop_fblock(c, EXCEPTION_GROUP_HANDLER, NO_LABEL); NEW_JUMP_TARGET_LABEL(c, reraise); @@ -3798,7 +3886,7 @@ compiler_try_star_except(struct compiler *c, stmt_ty s) VISIT_SEQ(c, stmt, s->v.TryStar.orelse); USE_LABEL(c, end); - return 1; + return SUCCESS; } static int @@ -3832,19 +3920,22 @@ compiler_import_as(struct compiler *c, location loc, */ Py_ssize_t len = PyUnicode_GET_LENGTH(name); Py_ssize_t dot = PyUnicode_FindChar(name, '.', 0, len, 1); - if (dot == -2) - return 0; + if (dot == -2) { + return ERROR; + } if (dot != -1) { /* Consume the base module name to get the first attribute */ while (1) { Py_ssize_t pos = dot + 1; PyObject *attr; dot = PyUnicode_FindChar(name, '.', pos, len, 1); - if (dot == -2) - return 0; + if (dot == -2) { + return ERROR; + } attr = PyUnicode_Substring(name, pos, (dot != -1) ? dot : len); - if (!attr) - return 0; + if (!attr) { + return ERROR; + } ADDOP_N(c, loc, IMPORT_FROM, attr, names); if (dot == -1) { break; @@ -3852,11 +3943,9 @@ compiler_import_as(struct compiler *c, location loc, ADDOP_I(c, loc, SWAP, 2); ADDOP(c, loc, POP_TOP); } - if (!compiler_nameop(c, loc, asname, Store)) { - return 0; - } + RETURN_IF_ERROR(compiler_nameop(c, loc, asname, Store)); ADDOP(c, loc, POP_TOP); - return 1; + return SUCCESS; } return compiler_nameop(c, loc, asname, Store); } @@ -3885,8 +3974,7 @@ compiler_import(struct compiler *c, stmt_ty s) if (alias->asname) { r = compiler_import_as(c, loc, alias->name, alias->asname); - if (!r) - return r; + RETURN_IF_ERROR(r); } else { identifier tmp = alias->name; @@ -3894,18 +3982,18 @@ compiler_import(struct compiler *c, stmt_ty s) alias->name, '.', 0, PyUnicode_GET_LENGTH(alias->name), 1); if (dot != -1) { tmp = PyUnicode_Substring(alias->name, 0, dot); - if (tmp == NULL) - return 0; + if (tmp == NULL) { + return ERROR; + } } r = compiler_nameop(c, loc, tmp, Store); if (dot != -1) { Py_DECREF(tmp); } - if (!r) - return r; + RETURN_IF_ERROR(r); } } - return 1; + return SUCCESS; } static int @@ -3917,17 +4005,16 @@ compiler_from_import(struct compiler *c, stmt_ty s) PyObject *names = PyTuple_New(n); if (!names) { - return 0; + return ERROR; } /* build up the names */ for (Py_ssize_t i = 0; i < n; i++) { alias_ty alias = (alias_ty)asdl_seq_GET(s->v.ImportFrom.names, i); - Py_INCREF(alias->name); - PyTuple_SET_ITEM(names, i, alias->name); + PyTuple_SET_ITEM(names, i, Py_NewRef(alias->name)); } - if (location_is_after(LOC(s), c->c_future->ff_location) && + if (location_is_after(LOC(s), c->c_future.ff_location) && s->v.ImportFrom.module && _PyUnicode_EqualToASCIIString(s->v.ImportFrom.module, "__future__")) { @@ -3951,7 +4038,7 @@ compiler_from_import(struct compiler *c, stmt_ty s) if (i == 0 && PyUnicode_READ_CHAR(alias->name, 0) == '*') { assert(n == 1); ADDOP(c, LOC(s), IMPORT_STAR); - return 1; + return SUCCESS; } ADDOP_NAME(c, LOC(s), IMPORT_FROM, alias->name, names); @@ -3960,13 +4047,11 @@ compiler_from_import(struct compiler *c, stmt_ty s) store_name = alias->asname; } - if (!compiler_nameop(c, LOC(s), store_name, Store)) { - return 0; - } + RETURN_IF_ERROR(compiler_nameop(c, LOC(s), store_name, Store)); } /* remove imported module */ ADDOP(c, LOC(s), POP_TOP); - return 1; + return SUCCESS; } static int @@ -3979,19 +4064,15 @@ compiler_assert(struct compiler *c, stmt_ty s) PyTuple_Check(s->v.Assert.test->v.Constant.value) && PyTuple_Size(s->v.Assert.test->v.Constant.value) > 0)) { - if (!compiler_warn(c, LOC(s), 
"assertion is always true, " - "perhaps remove parentheses?")) - { - return 0; - } + RETURN_IF_ERROR( + compiler_warn(c, LOC(s), "assertion is always true, " + "perhaps remove parentheses?")); } if (c->c_optimize) { - return 1; + return SUCCESS; } NEW_JUMP_TARGET_LABEL(c, end); - if (!compiler_jump_if(c, LOC(s), s->v.Assert.test, end, 1)) { - return 0; - } + RETURN_IF_ERROR(compiler_jump_if(c, LOC(s), s->v.Assert.test, end, 1)); ADDOP(c, LOC(s), LOAD_ASSERTION_ERROR); if (s->v.Assert.msg) { VISIT(c, expr, s->v.Assert.msg); @@ -4000,7 +4081,7 @@ compiler_assert(struct compiler *c, stmt_ty s) ADDOP_I(c, LOC(s), RAISE_VARARGS, 1); USE_LABEL(c, end); - return 1; + return SUCCESS; } static int @@ -4009,18 +4090,18 @@ compiler_stmt_expr(struct compiler *c, location loc, expr_ty value) if (c->c_interactive && c->c_nestlevel <= 1) { VISIT(c, expr, value); ADDOP(c, loc, PRINT_EXPR); - return 1; + return SUCCESS; } if (value->kind == Constant_kind) { /* ignore constant statement */ ADDOP(c, loc, NOP); - return 1; + return SUCCESS; } VISIT(c, expr, value); ADDOP(c, NO_LOCATION, POP_TOP); /* artificial */ - return 1; + return SUCCESS; } static int @@ -4116,21 +4197,21 @@ compiler_visit_stmt(struct compiler *c, stmt_ty s) return compiler_async_for(c, s); } - return 1; + return SUCCESS; } static int -unaryop(unaryop_ty op) +unaryop(unaryop_ty op, bool R) { switch (op) { case Invert: - return UNARY_INVERT; + return R ? UNARY_INVERT_R : UNARY_INVERT; case Not: - return UNARY_NOT; + return R ? UNARY_NOT_R : UNARY_NOT; case UAdd: - return UNARY_POSITIVE; + return R ? UNARY_POSITIVE_R : UNARY_POSITIVE; case USub: - return UNARY_NEGATIVE; + return R ? UNARY_NEGATIVE_R : UNARY_NEGATIVE; default: PyErr_Format(PyExc_SystemError, "unary op %d should not be possible", op); @@ -4186,10 +4267,10 @@ addop_binary(struct compiler *c, location loc, operator_ty binop, default: PyErr_Format(PyExc_SystemError, "%s op %d should not be possible", inplace ? 
"inplace" : "binary", binop); - return 0; + return ERROR; } ADDOP_I(c, loc, BINARY_OP, oparg); - return 1; + return SUCCESS; } @@ -4200,7 +4281,7 @@ addop_yield(struct compiler *c, location loc) { } ADDOP_I(c, loc, YIELD_VALUE, 0); ADDOP_I(c, loc, RESUME, 1); - return 1; + return SUCCESS; } static int @@ -4218,12 +4299,14 @@ compiler_nameop(struct compiler *c, location loc, !_PyUnicode_EqualToASCIIString(name, "True") && !_PyUnicode_EqualToASCIIString(name, "False")); - if (forbidden_name(c, loc, name, ctx)) - return 0; + if (forbidden_name(c, loc, name, ctx)) { + return ERROR; + } mangled = _Py_Mangle(c->u->u_private, name); - if (!mangled) - return 0; + if (!mangled) { + return ERROR; + } op = 0; optype = OP_NAME; @@ -4273,7 +4356,7 @@ compiler_nameop(struct compiler *c, location loc, case Del: op = DELETE_FAST; break; } ADDOP_N(c, loc, op, mangled, varnames); - return 1; + return SUCCESS; case OP_GLOBAL: switch (ctx) { case Load: op = LOAD_GLOBAL; break; @@ -4294,7 +4377,7 @@ compiler_nameop(struct compiler *c, location loc, arg = dict_add_o(dict, mangled); Py_DECREF(mangled); if (arg < 0) { - return 0; + return ERROR; } if (op == LOAD_GLOBAL) { arg <<= 1; @@ -4329,7 +4412,7 @@ compiler_boolop(struct compiler *c, expr_ty e) VISIT(c, expr, (expr_ty)asdl_seq_GET(s, n)); USE_LABEL(c, end); - return 1; + return SUCCESS; } static int @@ -4341,13 +4424,12 @@ starunpack_helper(struct compiler *c, location loc, if (n > 2 && are_all_items_const(elts, 0, n)) { PyObject *folded = PyTuple_New(n); if (folded == NULL) { - return 0; + return ERROR; } PyObject *val; for (Py_ssize_t i = 0; i < n; i++) { val = ((expr_ty)asdl_seq_GET(elts, i))->v.Constant.value; - Py_INCREF(val); - PyTuple_SET_ITEM(folded, i, val); + PyTuple_SET_ITEM(folded, i, Py_NewRef(val)); } if (tuple && !pushed) { ADDOP_LOAD_CONST_NEW(c, loc, folded); @@ -4355,7 +4437,7 @@ starunpack_helper(struct compiler *c, location loc, if (add == SET_ADD) { Py_SETREF(folded, PyFrozenSet_New(folded)); if (folded == NULL) { - return 0; + return ERROR; } } ADDOP_I(c, loc, build, pushed); @@ -4365,7 +4447,7 @@ starunpack_helper(struct compiler *c, location loc, ADDOP(c, loc, LIST_TO_TUPLE); } } - return 1; + return SUCCESS; } int big = n+pushed > STACK_USE_GUIDELINE; @@ -4387,7 +4469,7 @@ starunpack_helper(struct compiler *c, location loc, } else { ADDOP_I(c, loc, build, n+pushed); } - return 1; + return SUCCESS; } int sequence_built = 0; if (big) { @@ -4415,7 +4497,7 @@ starunpack_helper(struct compiler *c, location loc, if (tuple) { ADDOP(c, loc, LIST_TO_TUPLE); } - return 1; + return SUCCESS; } static int @@ -4427,10 +4509,11 @@ unpack_helper(struct compiler *c, location loc, asdl_expr_seq *elts) expr_ty elt = asdl_seq_GET(elts, i); if (elt->kind == Starred_kind && !seen_star) { if ((i >= (1 << 8)) || - (n-i-1 >= (INT_MAX >> 8))) + (n-i-1 >= (INT_MAX >> 8))) { return compiler_error(c, loc, "too many expressions in " "star-unpacking assignment"); + } ADDOP_I(c, loc, UNPACK_EX, (i + ((n-i-1) << 8))); seen_star = 1; } @@ -4442,19 +4525,19 @@ unpack_helper(struct compiler *c, location loc, asdl_expr_seq *elts) if (!seen_star) { ADDOP_I(c, loc, UNPACK_SEQUENCE, n); } - return 1; + return SUCCESS; } static int assignment_helper(struct compiler *c, location loc, asdl_expr_seq *elts) { Py_ssize_t n = asdl_seq_LEN(elts); - RETURN_IF_FALSE(unpack_helper(c, loc, elts)); + RETURN_IF_ERROR(unpack_helper(c, loc, elts)); for (Py_ssize_t i = 0; i < n; i++) { expr_ty elt = asdl_seq_GET(elts, i); VISIT(c, expr, elt->kind != Starred_kind ? 
elt : elt->v.Starred.value); } - return 1; + return SUCCESS; } static int @@ -4469,9 +4552,10 @@ compiler_list(struct compiler *c, expr_ty e) return starunpack_helper(c, loc, elts, 0, BUILD_LIST, LIST_APPEND, LIST_EXTEND, 0); } - else + else { VISIT_SEQ(c, expr, elts); - return 1; + } + return SUCCESS; } static int @@ -4486,9 +4570,10 @@ compiler_tuple(struct compiler *c, expr_ty e) return starunpack_helper(c, loc, elts, 0, BUILD_LIST, LIST_APPEND, LIST_EXTEND, 1); } - else + else { VISIT_SEQ(c, expr, elts); - return 1; + } + return SUCCESS; } static int @@ -4499,16 +4584,16 @@ compiler_set(struct compiler *c, expr_ty e) BUILD_SET, SET_ADD, SET_UPDATE, 0); } -static int +static bool are_all_items_const(asdl_expr_seq *seq, Py_ssize_t begin, Py_ssize_t end) { - Py_ssize_t i; - for (i = begin; i < end; i++) { + for (Py_ssize_t i = begin; i < end; i++) { expr_ty key = (expr_ty)asdl_seq_GET(seq, i); - if (key == NULL || key->kind != Constant_kind) - return 0; + if (key == NULL || key->kind != Constant_kind) { + return false; + } } - return 1; + return true; } static int @@ -4524,16 +4609,15 @@ compiler_subdict(struct compiler *c, expr_ty e, Py_ssize_t begin, Py_ssize_t end } keys = PyTuple_New(n); if (keys == NULL) { - return 0; + return SUCCESS; } for (i = begin; i < end; i++) { key = ((expr_ty)asdl_seq_GET(e->v.Dict.keys, i))->v.Constant.value; - Py_INCREF(key); - PyTuple_SET_ITEM(keys, i - begin, key); + PyTuple_SET_ITEM(keys, i - begin, Py_NewRef(key)); } ADDOP_LOAD_CONST_NEW(c, loc, keys); ADDOP_I(c, loc, BUILD_CONST_KEY_MAP, n); - return 1; + return SUCCESS; } if (big) { ADDOP_I(c, loc, BUILD_MAP, 0); @@ -4548,7 +4632,7 @@ compiler_subdict(struct compiler *c, expr_ty e, Py_ssize_t begin, Py_ssize_t end if (!big) { ADDOP_I(c, loc, BUILD_MAP, n); } - return 1; + return SUCCESS; } static int @@ -4565,9 +4649,7 @@ compiler_dict(struct compiler *c, expr_ty e) is_unpacking = (expr_ty)asdl_seq_GET(e->v.Dict.keys, i) == NULL; if (is_unpacking) { if (elements) { - if (!compiler_subdict(c, e, i - elements, i)) { - return 0; - } + RETURN_IF_ERROR(compiler_subdict(c, e, i - elements, i)); if (have_dict) { ADDOP_I(c, loc, DICT_UPDATE, 1); } @@ -4583,9 +4665,7 @@ compiler_dict(struct compiler *c, expr_ty e) } else { if (elements*2 > STACK_USE_GUIDELINE) { - if (!compiler_subdict(c, e, i - elements, i + 1)) { - return 0; - } + RETURN_IF_ERROR(compiler_subdict(c, e, i - elements, i + 1)); if (have_dict) { ADDOP_I(c, loc, DICT_UPDATE, 1); } @@ -4598,9 +4678,7 @@ compiler_dict(struct compiler *c, expr_ty e) } } if (elements) { - if (!compiler_subdict(c, e, n - elements, n)) { - return 0; - } + RETURN_IF_ERROR(compiler_subdict(c, e, n - elements, n)); if (have_dict) { ADDOP_I(c, loc, DICT_UPDATE, 1); } @@ -4609,7 +4687,7 @@ compiler_dict(struct compiler *c, expr_ty e) if (!have_dict) { ADDOP_I(c, loc, BUILD_MAP, 0); } - return 1; + return SUCCESS; } static int @@ -4618,9 +4696,7 @@ compiler_compare(struct compiler *c, expr_ty e) location loc = LOC(e); Py_ssize_t i, n; - if (!check_compare(c, e)) { - return 0; - } + RETURN_IF_ERROR(check_compare(c, e)); VISIT(c, expr, e->v.Compare.left); assert(asdl_seq_LEN(e->v.Compare.ops) > 0); n = asdl_seq_LEN(e->v.Compare.ops) - 1; @@ -4649,7 +4725,7 @@ compiler_compare(struct compiler *c, expr_ty e) USE_LABEL(c, end); } - return 1; + return SUCCESS; } static PyTypeObject * @@ -4702,7 +4778,7 @@ check_caller(struct compiler *c, expr_ty e) infer_type(e)->tp_name); } default: - return 1; + return SUCCESS; } } @@ -4718,7 +4794,7 @@ check_subscripter(struct compiler *c, 
expr_ty e) PyLong_Check(v) || PyFloat_Check(v) || PyComplex_Check(v) || PyAnySet_Check(v))) { - return 1; + return SUCCESS; } /* fall through */ case Set_kind: @@ -4731,7 +4807,7 @@ check_subscripter(struct compiler *c, expr_ty e) infer_type(e)->tp_name); } default: - return 1; + return SUCCESS; } } @@ -4744,14 +4820,14 @@ check_index(struct compiler *c, expr_ty e, expr_ty s) if (index_type == NULL || PyType_FastSubclass(index_type, Py_TPFLAGS_LONG_SUBCLASS) || index_type == &PySlice_Type) { - return 1; + return SUCCESS; } switch (e->kind) { case Constant_kind: v = e->v.Constant.value; if (!(PyUnicode_Check(v) || PyBytes_Check(v) || PyTuple_Check(v))) { - return 1; + return SUCCESS; } /* fall through */ case Tuple_kind: @@ -4767,7 +4843,7 @@ check_index(struct compiler *c, expr_ty e, expr_ty s) index_type->tp_name); } default: - return 1; + return SUCCESS; } } @@ -4816,7 +4892,7 @@ update_start_location_to_match_attr(struct compiler *c, location loc, return loc; } -// Return 1 if the method call was optimized, -1 if not, and 0 on error. +// Return 1 if the method call was optimized, 0 if not, and -1 on error. static int maybe_optimize_method_call(struct compiler *c, expr_ty e) { @@ -4827,37 +4903,36 @@ maybe_optimize_method_call(struct compiler *c, expr_ty e) /* Check that the call node is an attribute access */ if (meth->kind != Attribute_kind || meth->v.Attribute.ctx != Load) { - return -1; + return 0; } /* Check that the base object is not something that is imported */ if (is_import_originated(c, meth->v.Attribute.value)) { - return -1; + return 0; } /* Check that there aren't too many arguments */ argsl = asdl_seq_LEN(args); kwdsl = asdl_seq_LEN(kwds); if (argsl + kwdsl + (kwdsl != 0) >= STACK_USE_GUIDELINE) { - return -1; + return 0; } /* Check that there are no *varargs types of arguments. */ for (i = 0; i < argsl; i++) { expr_ty elt = asdl_seq_GET(args, i); if (elt->kind == Starred_kind) { - return -1; + return 0; } } for (i = 0; i < kwdsl; i++) { keyword_ty kw = asdl_seq_GET(kwds, i); if (kw->arg == NULL) { - return -1; + return 0; } } /* Alright, we can optimize the code. 
*/ VISIT(c, expr, meth->v.Attribute.value); - SET_LOC(c, meth); location loc = LOC(meth); loc = update_start_location_to_match_attr(c, loc, meth); ADDOP_NAME(c, loc, LOAD_METHOD, meth->v.Attribute.attr, names); @@ -4865,13 +4940,10 @@ maybe_optimize_method_call(struct compiler *c, expr_ty e) if (kwdsl) { VISIT_SEQ(c, keyword, kwds); - if (!compiler_call_simple_kw_helper(c, loc, kwds, kwdsl)) { - return 0; - }; + RETURN_IF_ERROR( + compiler_call_simple_kw_helper(c, loc, kwds, kwdsl)); } - SET_LOC(c, e); - loc = LOC(e); - loc = update_start_location_to_match_attr(c, loc, meth); + loc = update_start_location_to_match_attr(c, LOC(e), meth); ADDOP_I(c, loc, CALL, argsl + kwdsl); return 1; } @@ -4887,37 +4959,33 @@ validate_keywords(struct compiler *c, asdl_keyword_seq *keywords) } location loc = LOC(key); if (forbidden_name(c, loc, key->arg, Store)) { - return -1; + return ERROR; } for (Py_ssize_t j = i + 1; j < nkeywords; j++) { keyword_ty other = ((keyword_ty)asdl_seq_GET(keywords, j)); if (other->arg && !PyUnicode_Compare(key->arg, other->arg)) { - SET_LOC(c, other); compiler_error(c, LOC(other), "keyword argument repeated: %U", key->arg); - return -1; + return ERROR; } } } - return 0; + return SUCCESS; } static int compiler_call(struct compiler *c, expr_ty e) { - if (validate_keywords(c, e->v.Call.keywords) == -1) { - return 0; - } + RETURN_IF_ERROR(validate_keywords(c, e->v.Call.keywords)); int ret = maybe_optimize_method_call(c, e); - if (ret >= 0) { - return ret; + if (ret < 0) { + return ERROR; } - if (!check_caller(c, e->v.Call.func)) { - return 0; + if (ret == 1) { + return SUCCESS; } - SET_LOC(c, e->v.Call.func); + RETURN_IF_ERROR(check_caller(c, e->v.Call.func)); location loc = LOC(e->v.Call.func); ADDOP(c, loc, PUSH_NULL); - SET_LOC(c, e); VISIT(c, expr, e->v.Call.func); loc = LOC(e); return compiler_call_helper(c, loc, 0, @@ -4947,7 +5015,7 @@ compiler_joined_str(struct compiler *c, expr_ty e) ADDOP_I(c, loc, BUILD_STRING, asdl_seq_LEN(e->v.JoinedStr.values)); } } - return 1; + return SUCCESS; } /* Used to implement f-strings. Format a single value. */ @@ -4982,7 +5050,7 @@ compiler_formatted_value(struct compiler *c, expr_ty e) default: PyErr_Format(PyExc_SystemError, "Unrecognized conversion character %d", conversion); - return 0; + return ERROR; } if (e->v.FormattedValue.format_spec) { /* Evaluate the format spec, and update our opcode arg. */ @@ -4994,7 +5062,7 @@ compiler_formatted_value(struct compiler *c, expr_ty e) location loc = LOC(e); ADDOP_I(c, loc, FORMAT_VALUE, oparg); - return 1; + return SUCCESS; } static int @@ -5014,16 +5082,15 @@ compiler_subkwargs(struct compiler *c, location loc, } keys = PyTuple_New(n); if (keys == NULL) { - return 0; + return ERROR; } for (i = begin; i < end; i++) { key = ((keyword_ty) asdl_seq_GET(keywords, i))->arg; - Py_INCREF(key); - PyTuple_SET_ITEM(keys, i - begin, key); + PyTuple_SET_ITEM(keys, i - begin, Py_NewRef(key)); } ADDOP_LOAD_CONST_NEW(c, loc, keys); ADDOP_I(c, loc, BUILD_CONST_KEY_MAP, n); - return 1; + return SUCCESS; } if (big) { ADDOP_I(c, NO_LOCATION, BUILD_MAP, 0); @@ -5039,12 +5106,11 @@ compiler_subkwargs(struct compiler *c, location loc, if (!big) { ADDOP_I(c, loc, BUILD_MAP, n); } - return 1; + return SUCCESS; } /* Used by compiler_call_helper and maybe_optimize_method_call to emit * KW_NAMES before CALL. - * Returns 1 on success, 0 on error. 
*/ static int compiler_call_simple_kw_helper(struct compiler *c, location loc, @@ -5053,20 +5119,19 @@ compiler_call_simple_kw_helper(struct compiler *c, location loc, PyObject *names; names = PyTuple_New(nkwelts); if (names == NULL) { - return 0; + return ERROR; } for (int i = 0; i < nkwelts; i++) { keyword_ty kw = asdl_seq_GET(keywords, i); - Py_INCREF(kw->arg); - PyTuple_SET_ITEM(names, i, kw->arg); + PyTuple_SET_ITEM(names, i, Py_NewRef(kw->arg)); } Py_ssize_t arg = compiler_add_const(c, names); if (arg < 0) { - return 0; + return ERROR; } Py_DECREF(names); ADDOP_I(c, loc, KW_NAMES, arg); - return 1; + return SUCCESS; } @@ -5079,9 +5144,7 @@ compiler_call_helper(struct compiler *c, location loc, { Py_ssize_t i, nseen, nelts, nkwelts; - if (validate_keywords(c, keywords) == -1) { - return 0; - } + RETURN_IF_ERROR(validate_keywords(c, keywords)); nelts = asdl_seq_LEN(args); nkwelts = asdl_seq_LEN(keywords); @@ -5110,12 +5173,11 @@ compiler_call_helper(struct compiler *c, location loc, } if (nkwelts) { VISIT_SEQ(c, keyword, keywords); - if (!compiler_call_simple_kw_helper(c, loc, keywords, nkwelts)) { - return 0; - }; + RETURN_IF_ERROR( + compiler_call_simple_kw_helper(c, loc, keywords, nkwelts)); } ADDOP_I(c, loc, CALL, n + nelts + nkwelts); - return 1; + return SUCCESS; ex_call: @@ -5124,8 +5186,8 @@ compiler_call_helper(struct compiler *c, location loc, VISIT(c, expr, ((expr_ty)asdl_seq_GET(args, 0))->v.Starred.value); } else if (starunpack_helper(c, loc, args, n, BUILD_LIST, - LIST_APPEND, LIST_EXTEND, 1) == 0) { - return 0; + LIST_APPEND, LIST_EXTEND, 1) < 0) { + return ERROR; } /* Then keyword arguments */ if (nkwelts) { @@ -5138,9 +5200,7 @@ compiler_call_helper(struct compiler *c, location loc, if (kw->arg == NULL) { /* A keyword argument unpacking. */ if (nseen) { - if (!compiler_subkwargs(c, loc, keywords, i - nseen, i)) { - return 0; - } + RETURN_IF_ERROR(compiler_subkwargs(c, loc, keywords, i - nseen, i)); if (have_dict) { ADDOP_I(c, loc, DICT_MERGE, 1); } @@ -5160,9 +5220,7 @@ compiler_call_helper(struct compiler *c, location loc, } if (nseen) { /* Pack up any trailing keyword arguments. 
*/ - if (!compiler_subkwargs(c, loc, keywords, nkwelts - nseen, nkwelts)) { - return 0; - } + RETURN_IF_ERROR(compiler_subkwargs(c, loc, keywords, nkwelts - nseen, nkwelts)); if (have_dict) { ADDOP_I(c, loc, DICT_MERGE, 1); } @@ -5171,7 +5229,7 @@ compiler_call_helper(struct compiler *c, location loc, assert(have_dict); } ADDOP_I(c, loc, CALL_FUNCTION_EX, nkwelts > 0); - return 1; + return SUCCESS; } @@ -5266,17 +5324,14 @@ compiler_sync_comprehension_generator(struct compiler *c, location loc, Py_ssize_t n = asdl_seq_LEN(gen->ifs); for (Py_ssize_t i = 0; i < n; i++) { expr_ty e = (expr_ty)asdl_seq_GET(gen->ifs, i); - if (!compiler_jump_if(c, loc, e, if_cleanup, 0)) { - return 0; - } + RETURN_IF_ERROR(compiler_jump_if(c, loc, e, if_cleanup, 0)); } if (++gen_index < asdl_seq_LEN(generators)) { - if (!compiler_comprehension_generator(c, loc, - generators, gen_index, depth, - elt, val, type)) { - return 0; - } + RETURN_IF_ERROR( + compiler_comprehension_generator(c, loc, + generators, gen_index, depth, + elt, val, type)); } location elt_loc = LOC(elt); @@ -5310,7 +5365,7 @@ compiler_sync_comprehension_generator(struct compiler *c, location loc, ADDOP_I(c, elt_loc, MAP_ADD, depth + 1); break; default: - return 0; + return ERROR; } } @@ -5322,7 +5377,7 @@ compiler_sync_comprehension_generator(struct compiler *c, location loc, ADDOP(c, NO_LOCATION, END_FOR); } - return 1; + return SUCCESS; } static int @@ -5351,10 +5406,9 @@ compiler_async_comprehension_generator(struct compiler *c, location loc, USE_LABEL(c, start); /* Runtime will push a block here, so we need to account for that */ - if (!compiler_push_fblock(c, loc, ASYNC_COMPREHENSION_GENERATOR, - start, NO_LABEL, NULL)) { - return 0; - } + RETURN_IF_ERROR( + compiler_push_fblock(c, loc, ASYNC_COMPREHENSION_GENERATOR, + start, NO_LABEL, NULL)); ADDOP_JUMP(c, loc, SETUP_FINALLY, except); ADDOP(c, loc, GET_ANEXT); @@ -5366,18 +5420,15 @@ compiler_async_comprehension_generator(struct compiler *c, location loc, Py_ssize_t n = asdl_seq_LEN(gen->ifs); for (Py_ssize_t i = 0; i < n; i++) { expr_ty e = (expr_ty)asdl_seq_GET(gen->ifs, i); - if (!compiler_jump_if(c, loc, e, if_cleanup, 0)) { - return 0; - } + RETURN_IF_ERROR(compiler_jump_if(c, loc, e, if_cleanup, 0)); } depth++; if (++gen_index < asdl_seq_LEN(generators)) { - if (!compiler_comprehension_generator(c, loc, - generators, gen_index, depth, - elt, val, type)) { - return 0; - } + RETURN_IF_ERROR( + compiler_comprehension_generator(c, loc, + generators, gen_index, depth, + elt, val, type)); } location elt_loc = LOC(elt); @@ -5410,7 +5461,7 @@ compiler_async_comprehension_generator(struct compiler *c, location loc, ADDOP_I(c, elt_loc, MAP_ADD, depth + 1); break; default: - return 0; + return ERROR; } } @@ -5423,7 +5474,7 @@ compiler_async_comprehension_generator(struct compiler *c, location loc, ADDOP(c, loc, END_ASYNC_FOR); - return 1; + return SUCCESS; } static int @@ -5439,12 +5490,11 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type, int is_top_level_await = IS_TOP_LEVEL_AWAIT(c); outermost = (comprehension_ty) asdl_seq_GET(generators, 0); - if (!compiler_enter_scope(c, name, COMPILER_SCOPE_COMPREHENSION, - (void *)e, e->lineno)) + if (compiler_enter_scope(c, name, COMPILER_SCOPE_COMPREHENSION, + (void *)e, e->lineno) < 0) { goto error; } - SET_LOC(c, e); location loc = LOC(e); is_async_generator = c->u->u_ste->ste_coroutine; @@ -5480,27 +5530,32 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type, ADDOP_I(c, loc, op, 0); } - if 
(!compiler_comprehension_generator(c, loc, generators, 0, 0, - elt, val, type)) { + if (compiler_comprehension_generator(c, loc, generators, 0, 0, + elt, val, type) < 0) { goto error_in_scope; } if (type != COMP_GENEXP) { ADDOP(c, LOC(e), RETURN_VALUE); } + if (type == COMP_GENEXP) { + if (wrap_in_stopiteration_handler(c) < 0) { + goto error_in_scope; + } + } co = assemble(c, 1); - qualname = c->u->u_qualname; - Py_INCREF(qualname); + qualname = Py_NewRef(c->u->u_qualname); compiler_exit_scope(c); if (is_top_level_await && is_async_generator){ c->u->u_ste->ste_coroutine = 1; } - if (co == NULL) + if (co == NULL) { goto error; + } loc = LOC(e); - if (!compiler_make_closure(c, loc, co, 0, qualname)) { + if (compiler_make_closure(c, loc, co, 0, qualname) < 0) { goto error; } Py_DECREF(qualname); @@ -5523,13 +5578,13 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type, ADD_YIELD_FROM(c, loc, 1); } - return 1; + return SUCCESS; error_in_scope: compiler_exit_scope(c); error: Py_XDECREF(qualname); Py_XDECREF(co); - return 0; + return ERROR; } static int @@ -5578,13 +5633,12 @@ static int compiler_visit_keyword(struct compiler *c, keyword_ty k) { VISIT(c, expr, k->value); - return 1; + return SUCCESS; } static int compiler_with_except_finish(struct compiler *c, jump_target_label cleanup) { - UNSET_LOC(c); NEW_JUMP_TARGET_LABEL(c, suppress); ADDOP_JUMP(c, NO_LOCATION, POP_JUMP_IF_TRUE, suppress); ADDOP_I(c, NO_LOCATION, RERAISE, 2); @@ -5602,7 +5656,7 @@ compiler_with_except_finish(struct compiler *c, jump_target_label cleanup) { POP_EXCEPT_AND_RERAISE(c, NO_LOCATION); USE_LABEL(c, exit); - return 1; + return SUCCESS; } /* @@ -5659,15 +5713,13 @@ compiler_async_with(struct compiler *c, stmt_ty s, int pos) /* SETUP_WITH pushes a finally block. */ USE_LABEL(c, block); - if (!compiler_push_fblock(c, loc, ASYNC_WITH, block, final, s)) { - return 0; - } + RETURN_IF_ERROR(compiler_push_fblock(c, loc, ASYNC_WITH, block, final, s)); if (item->optional_vars) { VISIT(c, expr, item->optional_vars); } else { - /* Discard result from context.__aenter__() */ + /* Discard result from context.__aenter__() */ ADDOP(c, loc, POP_TOP); } @@ -5676,21 +5728,19 @@ compiler_async_with(struct compiler *c, stmt_ty s, int pos) /* BLOCK code */ VISIT_SEQ(c, stmt, s->v.AsyncWith.body) } - else if (!compiler_async_with(c, s, pos)) { - return 0; + else { + RETURN_IF_ERROR(compiler_async_with(c, s, pos)); } compiler_pop_fblock(c, ASYNC_WITH, block); - SET_LOC(c, s); ADDOP(c, loc, POP_BLOCK); /* End of body; start the cleanup */ /* For successful outcome: * call __exit__(None, None, None) */ - if(!compiler_call_exit_with_nones(c, loc)) - return 0; + RETURN_IF_ERROR(compiler_call_exit_with_nones(c, loc)); ADDOP_I(c, loc, GET_AWAITABLE, 2); ADDOP_LOAD_CONST(c, loc, Py_None); ADD_YIELD_FROM(c, loc, 1); @@ -5708,10 +5758,10 @@ compiler_async_with(struct compiler *c, stmt_ty s, int pos) ADDOP_I(c, loc, GET_AWAITABLE, 2); ADDOP_LOAD_CONST(c, loc, Py_None); ADD_YIELD_FROM(c, loc, 1); - compiler_with_except_finish(c, cleanup); + RETURN_IF_ERROR(compiler_with_except_finish(c, cleanup)); USE_LABEL(c, exit); - return 1; + return SUCCESS; } @@ -5757,9 +5807,7 @@ compiler_with(struct compiler *c, stmt_ty s, int pos) /* SETUP_WITH pushes a finally block. 
*/ USE_LABEL(c, block); - if (!compiler_push_fblock(c, loc, WITH, block, final, s)) { - return 0; - } + RETURN_IF_ERROR(compiler_push_fblock(c, loc, WITH, block, final, s)); if (item->optional_vars) { VISIT(c, expr, item->optional_vars); @@ -5770,15 +5818,14 @@ compiler_with(struct compiler *c, stmt_ty s, int pos) } pos++; - if (pos == asdl_seq_LEN(s->v.With.items)) + if (pos == asdl_seq_LEN(s->v.With.items)) { /* BLOCK code */ VISIT_SEQ(c, stmt, s->v.With.body) - else if (!compiler_with(c, s, pos)) - return 0; + } + else { + RETURN_IF_ERROR(compiler_with(c, s, pos)); + } - - /* Mark all following code as artificial */ - UNSET_LOC(c); ADDOP(c, NO_LOCATION, POP_BLOCK); compiler_pop_fblock(c, WITH, block); @@ -5787,10 +5834,8 @@ compiler_with(struct compiler *c, stmt_ty s, int pos) /* For successful outcome: * call __exit__(None, None, None) */ - SET_LOC(c, s); loc = LOC(s); - if (!compiler_call_exit_with_nones(c, loc)) - return 0; + RETURN_IF_ERROR(compiler_call_exit_with_nones(c, loc)); ADDOP(c, loc, POP_TOP); ADDOP_JUMP(c, loc, JUMP, exit); @@ -5800,10 +5845,10 @@ compiler_with(struct compiler *c, stmt_ty s, int pos) ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup); ADDOP(c, loc, PUSH_EXC_INFO); ADDOP(c, loc, WITH_EXCEPT_START); - compiler_with_except_finish(c, cleanup); + RETURN_IF_ERROR(compiler_with_except_finish(c, cleanup)); USE_LABEL(c, exit); - return 1; + return SUCCESS; } static int @@ -5825,7 +5870,17 @@ compiler_visit_expr1(struct compiler *c, expr_ty e) break; case UnaryOp_kind: VISIT(c, expr, e->v.UnaryOp.operand); - ADDOP(c, loc, unaryop(e->v.UnaryOp.op)); + if (c->c_regcode) { + oparg_t r1 = TMP_OPARG(c->u->u_ntmps++); + oparg_t r2 = TMP_OPARG(c->u->u_ntmps++); + ADDOP_REGS(c, loc, STORE_FAST_R, r1, UNUSED_OPARG, UNUSED_OPARG); + ADDOP_REGS(c, loc, unaryop(e->v.UnaryOp.op, true), + r1, r2, UNUSED_OPARG); + ADDOP_REGS(c, loc, LOAD_FAST_R, r2, UNUSED_OPARG, UNUSED_OPARG); + } else { + ADDOP_REGS(c, loc, unaryop(e->v.UnaryOp.op, false), + UNUSED_OPARG, UNUSED_OPARG, UNUSED_OPARG); + } break; case Lambda_kind: return compiler_lambda(c, e); @@ -5844,8 +5899,9 @@ compiler_visit_expr1(struct compiler *c, expr_ty e) case DictComp_kind: return compiler_dictcomp(c, e); case Yield_kind: - if (c->u->u_ste->ste_type != FunctionBlock) + if (c->u->u_ste->ste_type != FunctionBlock) { return compiler_error(c, loc, "'yield' outside function"); + } if (e->v.Yield.value) { VISIT(c, expr, e->v.Yield.value); } @@ -5855,12 +5911,12 @@ compiler_visit_expr1(struct compiler *c, expr_ty e) ADDOP_YIELD(c, loc); break; case YieldFrom_kind: - if (c->u->u_ste->ste_type != FunctionBlock) + if (c->u->u_ste->ste_type != FunctionBlock) { return compiler_error(c, loc, "'yield' outside function"); - - if (c->u->u_scope_type == COMPILER_SCOPE_ASYNC_FUNCTION) + } + if (c->u->u_scope_type == COMPILER_SCOPE_ASYNC_FUNCTION) { return compiler_error(c, loc, "'yield from' inside async function"); - + } VISIT(c, expr, e->v.YieldFrom.value); ADDOP(c, loc, GET_YIELD_FROM_ITER); ADDOP_LOAD_CONST(c, loc, Py_None); @@ -5873,7 +5929,7 @@ compiler_visit_expr1(struct compiler *c, expr_ty e) } if (c->u->u_scope_type != COMPILER_SCOPE_ASYNC_FUNCTION && - c->u->u_scope_type != COMPILER_SCOPE_COMPREHENSION){ + c->u->u_scope_type != COMPILER_SCOPE_COMPREHENSION) { return compiler_error(c, loc, "'await' outside async function"); } } @@ -5905,7 +5961,7 @@ compiler_visit_expr1(struct compiler *c, expr_ty e) break; case Store: if (forbidden_name(c, loc, e->v.Attribute.attr, e->v.Attribute.ctx)) { - return 0; + return ERROR; } ADDOP_NAME(c, 
loc, STORE_ATTR, e->v.Attribute.attr, names); break; @@ -5931,9 +5987,7 @@ compiler_visit_expr1(struct compiler *c, expr_ty e) case Slice_kind: { int n = compiler_slice(c, e); - if (n == 0) { - return 0; - } + RETURN_IF_ERROR(n); ADDOP_I(c, loc, BUILD_SLICE, n); break; } @@ -5945,13 +5999,12 @@ compiler_visit_expr1(struct compiler *c, expr_ty e) case Tuple_kind: return compiler_tuple(c, e); } - return 1; + return SUCCESS; } static int compiler_visit_expr(struct compiler *c, expr_ty e) { - SET_LOC(c, e); int res = compiler_visit_expr1(c, e); return res; } @@ -5970,7 +6023,6 @@ compiler_augassign(struct compiler *c, stmt_ty s) expr_ty e = s->v.AugAssign.target; location loc = LOC(e); - SET_LOC(c, e); switch (e->kind) { case Attribute_kind: @@ -5982,9 +6034,7 @@ compiler_augassign(struct compiler *c, stmt_ty s) case Subscript_kind: VISIT(c, expr, e->v.Subscript.value); if (is_two_element_slice(e->v.Subscript.slice)) { - if (!compiler_slice(c, e->v.Subscript.slice)) { - return 0; - } + RETURN_IF_ERROR(compiler_slice(c, e->v.Subscript.slice)); ADDOP_I(c, loc, COPY, 3); ADDOP_I(c, loc, COPY, 3); ADDOP_I(c, loc, COPY, 3); @@ -5998,14 +6048,13 @@ compiler_augassign(struct compiler *c, stmt_ty s) } break; case Name_kind: - if (!compiler_nameop(c, loc, e->v.Name.id, Load)) - return 0; + RETURN_IF_ERROR(compiler_nameop(c, loc, e->v.Name.id, Load)); break; default: PyErr_Format(PyExc_SystemError, "invalid node type (%d) for augmented assignment", e->kind); - return 0; + return ERROR; } loc = LOC(s); @@ -6013,7 +6062,6 @@ compiler_augassign(struct compiler *c, stmt_ty s) VISIT(c, expr, s->v.AugAssign.value); ADDOP_INPLACE(c, loc, s->v.AugAssign.op); - SET_LOC(c, e); loc = LOC(e); switch (e->kind) { @@ -6040,7 +6088,7 @@ compiler_augassign(struct compiler *c, stmt_ty s) default: Py_UNREACHABLE(); } - return 1; + return SUCCESS; } static int @@ -6048,7 +6096,7 @@ check_ann_expr(struct compiler *c, expr_ty e) { VISIT(c, expr, e); ADDOP(c, LOC(e), POP_TOP); - return 1; + return SUCCESS; } static int @@ -6056,8 +6104,8 @@ check_annotation(struct compiler *c, stmt_ty s) { /* Annotations of complex targets does not produce anything under annotations future */ - if (c->c_future->ff_features & CO_FUTURE_ANNOTATIONS) { - return 1; + if (c->c_future.ff_features & CO_FUTURE_ANNOTATIONS) { + return SUCCESS; } /* Annotations are only evaluated in a module or class. */ @@ -6065,7 +6113,7 @@ check_annotation(struct compiler *c, stmt_ty s) c->u->u_scope_type == COMPILER_SCOPE_CLASS) { return check_ann_expr(c, s->v.AnnAssign.annotation); } - return 1; + return SUCCESS; } static int @@ -6074,26 +6122,24 @@ check_ann_subscr(struct compiler *c, expr_ty e) /* We check that everything in a subscript is defined at runtime. 
*/ switch (e->kind) { case Slice_kind: - if (e->v.Slice.lower && !check_ann_expr(c, e->v.Slice.lower)) { - return 0; + if (e->v.Slice.lower && check_ann_expr(c, e->v.Slice.lower) < 0) { + return ERROR; } - if (e->v.Slice.upper && !check_ann_expr(c, e->v.Slice.upper)) { - return 0; + if (e->v.Slice.upper && check_ann_expr(c, e->v.Slice.upper) < 0) { + return ERROR; } - if (e->v.Slice.step && !check_ann_expr(c, e->v.Slice.step)) { - return 0; + if (e->v.Slice.step && check_ann_expr(c, e->v.Slice.step) < 0) { + return ERROR; } - return 1; + return SUCCESS; case Tuple_kind: { /* extended slice */ asdl_expr_seq *elts = e->v.Tuple.elts; Py_ssize_t i, n = asdl_seq_LEN(elts); for (i = 0; i < n; i++) { - if (!check_ann_subscr(c, asdl_seq_GET(elts, i))) { - return 0; - } + RETURN_IF_ERROR(check_ann_subscr(c, asdl_seq_GET(elts, i))); } - return 1; + return SUCCESS; } default: return check_ann_expr(c, e); @@ -6116,13 +6162,14 @@ compiler_annassign(struct compiler *c, stmt_ty s) } switch (targ->kind) { case Name_kind: - if (forbidden_name(c, loc, targ->v.Name.id, Store)) - return 0; + if (forbidden_name(c, loc, targ->v.Name.id, Store)) { + return ERROR; + } /* If we have a simple name in a module or class, store annotation. */ if (s->v.AnnAssign.simple && (c->u->u_scope_type == COMPILER_SCOPE_MODULE || c->u->u_scope_type == COMPILER_SCOPE_CLASS)) { - if (c->c_future->ff_features & CO_FUTURE_ANNOTATIONS) { + if (c->c_future.ff_features & CO_FUTURE_ANNOTATIONS) { VISIT(c, annexpr, s->v.AnnAssign.annotation) } else { @@ -6135,31 +6182,32 @@ compiler_annassign(struct compiler *c, stmt_ty s) } break; case Attribute_kind: - if (forbidden_name(c, loc, targ->v.Attribute.attr, Store)) - return 0; + if (forbidden_name(c, loc, targ->v.Attribute.attr, Store)) { + return ERROR; + } if (!s->v.AnnAssign.value && - !check_ann_expr(c, targ->v.Attribute.value)) { - return 0; + check_ann_expr(c, targ->v.Attribute.value) < 0) { + return ERROR; } break; case Subscript_kind: if (!s->v.AnnAssign.value && - (!check_ann_expr(c, targ->v.Subscript.value) || - !check_ann_subscr(c, targ->v.Subscript.slice))) { - return 0; + (check_ann_expr(c, targ->v.Subscript.value) < 0 || + check_ann_subscr(c, targ->v.Subscript.slice) < 0)) { + return ERROR; } break; default: PyErr_Format(PyExc_SystemError, "invalid node type (%d) for annotated assignment", targ->kind); - return 0; + return ERROR; } /* Annotation is evaluated last. */ - if (!s->v.AnnAssign.simple && !check_annotation(c, s)) { - return 0; + if (!s->v.AnnAssign.simple && check_annotation(c, s) < 0) { + return ERROR; } - return 1; + return SUCCESS; } /* Raises a SyntaxError and returns 0. @@ -6175,12 +6223,11 @@ compiler_error(struct compiler *c, location loc, PyObject *msg = PyUnicode_FromFormatV(format, vargs); va_end(vargs); if (msg == NULL) { - return 0; + return ERROR; } PyObject *loc_obj = PyErr_ProgramTextObject(c->c_filename, loc.lineno); if (loc_obj == NULL) { - Py_INCREF(Py_None); - loc_obj = Py_None; + loc_obj = Py_NewRef(Py_None); } PyObject *args = Py_BuildValue("O(OiiOii)", msg, c->c_filename, loc.lineno, loc.col_offset + 1, loc_obj, @@ -6193,7 +6240,7 @@ compiler_error(struct compiler *c, location loc, exit: Py_DECREF(loc_obj); Py_XDECREF(args); - return 0; + return ERROR; } /* Emits a SyntaxWarning and returns 1 on success. 
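/*
 * Editor's note, not part of the patch: under the new return convention the
 * error-reporting helpers themselves yield ERROR, so call sites above simply
 * write "return compiler_error(c, loc, ...);" to report and propagate in one
 * step, and compiler_warn() (just below) returns SUCCESS unless the warning
 * is escalated to an error, which is why its callers wrap it in
 * RETURN_IF_ERROR(). A toy analogue of that shape, with hypothetical names
 * and assuming <stdio.h> plus the SUCCESS/ERROR values sketched earlier:
 */
static int report_failure(const char *msg)
{
    fprintf(stderr, "error: %s\n", msg);   /* report the problem... */
    return ERROR;                          /* ...and signal failure to the caller */
}

static int check_positive(int v)
{
    if (v <= 0) {
        return report_failure("value must be positive");   /* report-and-propagate in one line */
    }
    return SUCCESS;
}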
@@ -6209,7 +6256,7 @@ compiler_warn(struct compiler *c, location loc, PyObject *msg = PyUnicode_FromFormatV(format, vargs); va_end(vargs); if (msg == NULL) { - return 0; + return ERROR; } if (PyErr_WarnExplicitObject(PyExc_SyntaxWarning, msg, c->c_filename, loc.lineno, NULL, NULL) < 0) @@ -6222,10 +6269,10 @@ compiler_warn(struct compiler *c, location loc, compiler_error(c, loc, PyUnicode_AsUTF8(msg)); } Py_DECREF(msg); - return 0; + return ERROR; } Py_DECREF(msg); - return 1; + return SUCCESS; } static int @@ -6236,19 +6283,13 @@ compiler_subscript(struct compiler *c, expr_ty e) int op = 0; if (ctx == Load) { - if (!check_subscripter(c, e->v.Subscript.value)) { - return 0; - } - if (!check_index(c, e->v.Subscript.value, e->v.Subscript.slice)) { - return 0; - } + RETURN_IF_ERROR(check_subscripter(c, e->v.Subscript.value)); + RETURN_IF_ERROR(check_index(c, e->v.Subscript.value, e->v.Subscript.slice)); } VISIT(c, expr, e->v.Subscript.value); if (is_two_element_slice(e->v.Subscript.slice) && ctx != Del) { - if (!compiler_slice(c, e->v.Subscript.slice)) { - return 0; - } + RETURN_IF_ERROR(compiler_slice(c, e->v.Subscript.slice)); if (ctx == Load) { ADDOP(c, loc, BINARY_SLICE); } @@ -6267,11 +6308,11 @@ compiler_subscript(struct compiler *c, expr_ty e) assert(op); ADDOP(c, loc, op); } - return 1; + return SUCCESS; } /* Returns the number of the values emitted, - * thus are needed to build the slice, or 0 if there is an error. */ + * thus are needed to build the slice, or -1 if there is an error. */ static int compiler_slice(struct compiler *c, expr_ty s) { @@ -6329,20 +6370,20 @@ ensure_fail_pop(struct compiler *c, pattern_context *pc, Py_ssize_t n) { Py_ssize_t size = n + 1; if (size <= pc->fail_pop_size) { - return 1; + return SUCCESS; } Py_ssize_t needed = sizeof(jump_target_label) * size; jump_target_label *resized = PyObject_Realloc(pc->fail_pop, needed); if (resized == NULL) { PyErr_NoMemory(); - return 0; + return ERROR; } pc->fail_pop = resized; while (pc->fail_pop_size < size) { NEW_JUMP_TARGET_LABEL(c, new_block); pc->fail_pop[pc->fail_pop_size++] = new_block; } - return 1; + return SUCCESS; } // Use op to jump to the correct fail_pop block. @@ -6353,9 +6394,9 @@ jump_to_fail_pop(struct compiler *c, location loc, // Pop any items on the top of the stack, plus any objects we were going to // capture on success: Py_ssize_t pops = pc->on_top + PyList_GET_SIZE(pc->stores); - RETURN_IF_FALSE(ensure_fail_pop(c, pc, pops)); + RETURN_IF_ERROR(ensure_fail_pop(c, pc, pops)); ADDOP_JUMP(c, loc, op, pc->fail_pop[pops]); - return 1; + return SUCCESS; } // Build all of the fail_pop blocks and reset fail_pop. 
@@ -6365,21 +6406,21 @@ emit_and_reset_fail_pop(struct compiler *c, location loc, { if (!pc->fail_pop_size) { assert(pc->fail_pop == NULL); - return 1; + return SUCCESS; } while (--pc->fail_pop_size) { USE_LABEL(c, pc->fail_pop[pc->fail_pop_size]); - if (!cfg_builder_addop_noarg(CFG_BUILDER(c), POP_TOP, loc)) { + if (cfg_builder_addop_noarg(CFG_BUILDER(c), POP_TOP, loc) < 0) { pc->fail_pop_size = 0; PyObject_Free(pc->fail_pop); pc->fail_pop = NULL; - return 0; + return ERROR; } } USE_LABEL(c, pc->fail_pop[0]); PyObject_Free(pc->fail_pop); pc->fail_pop = NULL; - return 1; + return SUCCESS; } static int @@ -6396,7 +6437,7 @@ pattern_helper_rotate(struct compiler *c, location loc, Py_ssize_t count) while (1 < count) { ADDOP_I(c, loc, SWAP, count--); } - return 1; + return SUCCESS; } static int @@ -6405,23 +6446,22 @@ pattern_helper_store_name(struct compiler *c, location loc, { if (n == NULL) { ADDOP(c, loc, POP_TOP); - return 1; + return SUCCESS; } if (forbidden_name(c, loc, n, Store)) { - return 0; + return ERROR; } // Can't assign to the same name twice: int duplicate = PySequence_Contains(pc->stores, n); - if (duplicate < 0) { - return 0; - } + RETURN_IF_ERROR(duplicate); if (duplicate) { return compiler_error_duplicate_store(c, loc, n); } // Rotate this object underneath any items we need to preserve: Py_ssize_t rotations = pc->on_top + PyList_GET_SIZE(pc->stores) + 1; - RETURN_IF_FALSE(pattern_helper_rotate(c, loc, rotations)); - return !PyList_Append(pc->stores, n); + RETURN_IF_ERROR(pattern_helper_rotate(c, loc, rotations)); + RETURN_IF_ERROR(PyList_Append(pc->stores, n)); + return SUCCESS; } @@ -6435,10 +6475,11 @@ pattern_unpack_helper(struct compiler *c, location loc, pattern_ty elt = asdl_seq_GET(elts, i); if (elt->kind == MatchStar_kind && !seen_star) { if ((i >= (1 << 8)) || - (n-i-1 >= (INT_MAX >> 8))) + (n-i-1 >= (INT_MAX >> 8))) { return compiler_error(c, loc, "too many expressions in " "star-unpacking sequence pattern"); + } ADDOP_I(c, loc, UNPACK_EX, (i + ((n-i-1) << 8))); seen_star = 1; } @@ -6450,15 +6491,15 @@ pattern_unpack_helper(struct compiler *c, location loc, if (!seen_star) { ADDOP_I(c, loc, UNPACK_SEQUENCE, n); } - return 1; + return SUCCESS; } static int -pattern_helper_sequence_unpack(struct compiler *c, location *ploc, +pattern_helper_sequence_unpack(struct compiler *c, location loc, asdl_pattern_seq *patterns, Py_ssize_t star, pattern_context *pc) { - RETURN_IF_FALSE(pattern_unpack_helper(c, *ploc, patterns)); + RETURN_IF_ERROR(pattern_unpack_helper(c, loc, patterns)); Py_ssize_t size = asdl_seq_LEN(patterns); // We've now got a bunch of new subjects on the stack. They need to remain // there after each subpattern match: @@ -6467,16 +6508,16 @@ pattern_helper_sequence_unpack(struct compiler *c, location *ploc, // One less item to keep track of each time we loop through: pc->on_top--; pattern_ty pattern = asdl_seq_GET(patterns, i); - RETURN_IF_FALSE(compiler_pattern_subpattern(c, ploc, pattern, pc)); + RETURN_IF_ERROR(compiler_pattern_subpattern(c, pattern, pc)); } - return 1; + return SUCCESS; } // Like pattern_helper_sequence_unpack, but uses BINARY_SUBSCR instead of // UNPACK_SEQUENCE / UNPACK_EX. This is more efficient for patterns with a // starred wildcard like [first, *_] / [first, *_, last] / [*_, last] / etc. 
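/*
 * Editor's illustration, not part of the patch: for sub-patterns after the
 * starred wildcard, the helper below cannot rely on negative indexing, so it
 * emits GET_LEN, a LOAD_CONST of (size - i), a subtraction, and
 * BINARY_SUBSCR. Sub-pattern i of a pattern with `size` elements sits
 * (size - i) positions from the end of the subject, so it is fetched at
 * runtime index len(subject) - (size - i); for [first, *_, last]
 * (size == 3), "last" has i == 2 and is loaded at index len(subject) - 1.
 * The hypothetical helper below only restates that arithmetic.
 */
static Py_ssize_t
subscript_index_after_star(Py_ssize_t subject_len, Py_ssize_t size, Py_ssize_t i)
{
    /* Nonnegative index of sub-pattern i, counted from the front of the subject. */
    return subject_len - (size - i);
}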
static int -pattern_helper_sequence_subscr(struct compiler *c, location *ploc, +pattern_helper_sequence_subscr(struct compiler *c, location loc, asdl_pattern_seq *patterns, Py_ssize_t star, pattern_context *pc) { @@ -6492,41 +6533,40 @@ pattern_helper_sequence_subscr(struct compiler *c, location *ploc, assert(WILDCARD_STAR_CHECK(pattern)); continue; } - ADDOP_I(c, *ploc, COPY, 1); + ADDOP_I(c, loc, COPY, 1); if (i < star) { - ADDOP_LOAD_CONST_NEW(c, *ploc, PyLong_FromSsize_t(i)); + ADDOP_LOAD_CONST_NEW(c, loc, PyLong_FromSsize_t(i)); } else { // The subject may not support negative indexing! Compute a // nonnegative index: - ADDOP(c, *ploc, GET_LEN); - ADDOP_LOAD_CONST_NEW(c, *ploc, PyLong_FromSsize_t(size - i)); - ADDOP_BINARY(c, *ploc, Sub); + ADDOP(c, loc, GET_LEN); + ADDOP_LOAD_CONST_NEW(c, loc, PyLong_FromSsize_t(size - i)); + ADDOP_BINARY(c, loc, Sub); } - ADDOP(c, *ploc, BINARY_SUBSCR); - RETURN_IF_FALSE(compiler_pattern_subpattern(c, ploc, pattern, pc)); + ADDOP(c, loc, BINARY_SUBSCR); + RETURN_IF_ERROR(compiler_pattern_subpattern(c, pattern, pc)); } // Pop the subject, we're done with it: pc->on_top--; - ADDOP(c, *ploc, POP_TOP); - return 1; + ADDOP(c, loc, POP_TOP); + return SUCCESS; } // Like compiler_pattern, but turn off checks for irrefutability. static int -compiler_pattern_subpattern(struct compiler *c, location *ploc, +compiler_pattern_subpattern(struct compiler *c, pattern_ty p, pattern_context *pc) { int allow_irrefutable = pc->allow_irrefutable; pc->allow_irrefutable = 1; - RETURN_IF_FALSE(compiler_pattern(c, ploc, p, pc)); + RETURN_IF_ERROR(compiler_pattern(c, p, pc)); pc->allow_irrefutable = allow_irrefutable; - return 1; + return SUCCESS; } static int -compiler_pattern_as(struct compiler *c, location *ploc, - pattern_ty p, pattern_context *pc) +compiler_pattern_as(struct compiler *c, pattern_ty p, pattern_context *pc) { assert(p->kind == MatchAs_kind); if (p->v.MatchAs.pattern == NULL) { @@ -6534,30 +6574,30 @@ compiler_pattern_as(struct compiler *c, location *ploc, if (!pc->allow_irrefutable) { if (p->v.MatchAs.name) { const char *e = "name capture %R makes remaining patterns unreachable"; - return compiler_error(c, *ploc, e, p->v.MatchAs.name); + return compiler_error(c, LOC(p), e, p->v.MatchAs.name); } const char *e = "wildcard makes remaining patterns unreachable"; - return compiler_error(c, *ploc, e); + return compiler_error(c, LOC(p), e); } - return pattern_helper_store_name(c, *ploc, p->v.MatchAs.name, pc); + return pattern_helper_store_name(c, LOC(p), p->v.MatchAs.name, pc); } // Need to make a copy for (possibly) storing later: pc->on_top++; - ADDOP_I(c, *ploc, COPY, 1); - RETURN_IF_FALSE(compiler_pattern(c, ploc, p->v.MatchAs.pattern, pc)); + ADDOP_I(c, LOC(p), COPY, 1); + RETURN_IF_ERROR(compiler_pattern(c, p->v.MatchAs.pattern, pc)); // Success! 
Store it: pc->on_top--; - RETURN_IF_FALSE(pattern_helper_store_name(c, *ploc, p->v.MatchAs.name, pc)); - return 1; + RETURN_IF_ERROR(pattern_helper_store_name(c, LOC(p), p->v.MatchAs.name, pc)); + return SUCCESS; } static int compiler_pattern_star(struct compiler *c, pattern_ty p, pattern_context *pc) { assert(p->kind == MatchStar_kind); - location loc = LOC(p); - RETURN_IF_FALSE(pattern_helper_store_name(c, loc, p->v.MatchStar.name, pc)); - return 1; + RETURN_IF_ERROR( + pattern_helper_store_name(c, LOC(p), p->v.MatchStar.name, pc)); + return SUCCESS; } static int @@ -6568,27 +6608,24 @@ validate_kwd_attrs(struct compiler *c, asdl_identifier_seq *attrs, asdl_pattern_ Py_ssize_t nattrs = asdl_seq_LEN(attrs); for (Py_ssize_t i = 0; i < nattrs; i++) { identifier attr = ((identifier)asdl_seq_GET(attrs, i)); - SET_LOC(c, ((pattern_ty) asdl_seq_GET(patterns, i))); location loc = LOC((pattern_ty) asdl_seq_GET(patterns, i)); if (forbidden_name(c, loc, attr, Store)) { - return -1; + return ERROR; } for (Py_ssize_t j = i + 1; j < nattrs; j++) { identifier other = ((identifier)asdl_seq_GET(attrs, j)); if (!PyUnicode_Compare(attr, other)) { location loc = LOC((pattern_ty) asdl_seq_GET(patterns, j)); - SET_LOC(c, ((pattern_ty) asdl_seq_GET(patterns, j))); compiler_error(c, loc, "attribute name repeated in class pattern: %U", attr); - return -1; + return ERROR; } } } - return 0; + return SUCCESS; } static int -compiler_pattern_class(struct compiler *c, location *ploc, - pattern_ty p, pattern_context *pc) +compiler_pattern_class(struct compiler *c, pattern_ty p, pattern_context *pc) { assert(p->kind == MatchClass_kind); asdl_pattern_seq *patterns = p->v.MatchClass.patterns; @@ -6601,34 +6638,34 @@ compiler_pattern_class(struct compiler *c, location *ploc, // AST validator shouldn't let this happen, but if it does, // just fail, don't crash out of the interpreter const char * e = "kwd_attrs (%d) / kwd_patterns (%d) length mismatch in class pattern"; - return compiler_error(c, *ploc, e, nattrs, nkwd_patterns); + return compiler_error(c, LOC(p), e, nattrs, nkwd_patterns); } if (INT_MAX < nargs || INT_MAX < nargs + nattrs - 1) { const char *e = "too many sub-patterns in class pattern %R"; - return compiler_error(c, *ploc, e, p->v.MatchClass.cls); + return compiler_error(c, LOC(p), e, p->v.MatchClass.cls); } if (nattrs) { - RETURN_IF_FALSE(!validate_kwd_attrs(c, kwd_attrs, kwd_patterns)); - SET_LOC(c, p); + RETURN_IF_ERROR(validate_kwd_attrs(c, kwd_attrs, kwd_patterns)); } VISIT(c, expr, p->v.MatchClass.cls); - PyObject *attr_names; - RETURN_IF_FALSE(attr_names = PyTuple_New(nattrs)); + PyObject *attr_names = PyTuple_New(nattrs); + if (attr_names == NULL) { + return ERROR; + } Py_ssize_t i; for (i = 0; i < nattrs; i++) { PyObject *name = asdl_seq_GET(kwd_attrs, i); - Py_INCREF(name); - PyTuple_SET_ITEM(attr_names, i, name); - } - ADDOP_LOAD_CONST_NEW(c, *ploc, attr_names); - ADDOP_I(c, *ploc, MATCH_CLASS, nargs); - ADDOP_I(c, *ploc, COPY, 1); - ADDOP_LOAD_CONST(c, *ploc, Py_None); - ADDOP_I(c, *ploc, IS_OP, 1); + PyTuple_SET_ITEM(attr_names, i, Py_NewRef(name)); + } + ADDOP_LOAD_CONST_NEW(c, LOC(p), attr_names); + ADDOP_I(c, LOC(p), MATCH_CLASS, nargs); + ADDOP_I(c, LOC(p), COPY, 1); + ADDOP_LOAD_CONST(c, LOC(p), Py_None); + ADDOP_I(c, LOC(p), IS_OP, 1); // TOS is now a tuple of (nargs + nattrs) attributes (or None): pc->on_top++; - RETURN_IF_FALSE(jump_to_fail_pop(c, *ploc, pc, POP_JUMP_IF_FALSE)); - ADDOP_I(c, *ploc, UNPACK_SEQUENCE, nargs + nattrs); + RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, 
POP_JUMP_IF_FALSE)); + ADDOP_I(c, LOC(p), UNPACK_SEQUENCE, nargs + nattrs); pc->on_top += nargs + nattrs - 1; for (i = 0; i < nargs + nattrs; i++) { pc->on_top--; @@ -6642,19 +6679,18 @@ compiler_pattern_class(struct compiler *c, location *ploc, pattern = asdl_seq_GET(kwd_patterns, i - nargs); } if (WILDCARD_CHECK(pattern)) { - ADDOP(c, *ploc, POP_TOP); + ADDOP(c, LOC(p), POP_TOP); continue; } - *ploc = LOC(pattern); - RETURN_IF_FALSE(compiler_pattern_subpattern(c, ploc, pattern, pc)); + RETURN_IF_ERROR(compiler_pattern_subpattern(c, pattern, pc)); } // Success! Pop the tuple of attributes: - return 1; + return SUCCESS; } static int -compiler_pattern_mapping(struct compiler *c, location *ploc, - pattern_ty p, pattern_context *pc) +compiler_pattern_mapping(struct compiler *c, pattern_ty p, + pattern_context *pc) { assert(p->kind == MatchMapping_kind); asdl_expr_seq *keys = p->v.MatchMapping.keys; @@ -6665,29 +6701,29 @@ compiler_pattern_mapping(struct compiler *c, location *ploc, // AST validator shouldn't let this happen, but if it does, // just fail, don't crash out of the interpreter const char * e = "keys (%d) / patterns (%d) length mismatch in mapping pattern"; - return compiler_error(c, *ploc, e, size, npatterns); + return compiler_error(c, LOC(p), e, size, npatterns); } // We have a double-star target if "rest" is set PyObject *star_target = p->v.MatchMapping.rest; // We need to keep the subject on top during the mapping and length checks: pc->on_top++; - ADDOP(c, *ploc, MATCH_MAPPING); - RETURN_IF_FALSE(jump_to_fail_pop(c, *ploc, pc, POP_JUMP_IF_FALSE)); + ADDOP(c, LOC(p), MATCH_MAPPING); + RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE)); if (!size && !star_target) { // If the pattern is just "{}", we're done! Pop the subject: pc->on_top--; - ADDOP(c, *ploc, POP_TOP); - return 1; + ADDOP(c, LOC(p), POP_TOP); + return SUCCESS; } if (size) { // If the pattern has any keys in it, perform a length check: - ADDOP(c, *ploc, GET_LEN); - ADDOP_LOAD_CONST_NEW(c, *ploc, PyLong_FromSsize_t(size)); - ADDOP_COMPARE(c, *ploc, GtE); - RETURN_IF_FALSE(jump_to_fail_pop(c, *ploc, pc, POP_JUMP_IF_FALSE)); + ADDOP(c, LOC(p), GET_LEN); + ADDOP_LOAD_CONST_NEW(c, LOC(p), PyLong_FromSsize_t(size)); + ADDOP_COMPARE(c, LOC(p), GtE); + RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE)); } if (INT_MAX < size - 1) { - return compiler_error(c, *ploc, "too many sub-patterns in mapping pattern"); + return compiler_error(c, LOC(p), "too many sub-patterns in mapping pattern"); } // Collect all of the keys into a tuple for MATCH_KEYS and // **rest. They can either be dotted names or literals: @@ -6696,7 +6732,7 @@ compiler_pattern_mapping(struct compiler *c, location *ploc, // SyntaxError in the case of duplicates. 
PyObject *seen = PySet_New(NULL); if (seen == NULL) { - return 0; + return ERROR; } // NOTE: goto error on failure in the loop below to avoid leaking `seen` @@ -6706,7 +6742,6 @@ compiler_pattern_mapping(struct compiler *c, location *ploc, const char *e = "can't use NULL keys in MatchMapping " "(set 'rest' parameter instead)"; location loc = LOC((pattern_ty) asdl_seq_GET(patterns, i)); - SET_LOC(c, (pattern_ty) asdl_seq_GET(patterns, i)); compiler_error(c, loc, e); goto error; } @@ -6718,7 +6753,7 @@ compiler_pattern_mapping(struct compiler *c, location *ploc, } if (in_seen) { const char *e = "mapping pattern checks duplicate key (%R)"; - compiler_error(c, *ploc, e, key->v.Constant.value); + compiler_error(c, LOC(p), e, key->v.Constant.value); goto error; } if (PySet_Add(seen, key->v.Constant.value)) { @@ -6728,10 +6763,10 @@ compiler_pattern_mapping(struct compiler *c, location *ploc, else if (key->kind != Attribute_kind) { const char *e = "mapping pattern keys may only match literals and attribute lookups"; - compiler_error(c, *ploc, e); + compiler_error(c, LOC(p), e); goto error; } - if (!compiler_visit_expr(c, key)) { + if (compiler_visit_expr(c, key) < 0) { goto error; } } @@ -6739,22 +6774,22 @@ compiler_pattern_mapping(struct compiler *c, location *ploc, // all keys have been checked; there are no duplicates Py_DECREF(seen); - ADDOP_I(c, *ploc, BUILD_TUPLE, size); - ADDOP(c, *ploc, MATCH_KEYS); + ADDOP_I(c, LOC(p), BUILD_TUPLE, size); + ADDOP(c, LOC(p), MATCH_KEYS); // There's now a tuple of keys and a tuple of values on top of the subject: pc->on_top += 2; - ADDOP_I(c, *ploc, COPY, 1); - ADDOP_LOAD_CONST(c, *ploc, Py_None); - ADDOP_I(c, *ploc, IS_OP, 1); - RETURN_IF_FALSE(jump_to_fail_pop(c, *ploc, pc, POP_JUMP_IF_FALSE)); + ADDOP_I(c, LOC(p), COPY, 1); + ADDOP_LOAD_CONST(c, LOC(p), Py_None); + ADDOP_I(c, LOC(p), IS_OP, 1); + RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE)); // So far so good. Use that tuple of values on the stack to match // sub-patterns against: - ADDOP_I(c, *ploc, UNPACK_SEQUENCE, size); + ADDOP_I(c, LOC(p), UNPACK_SEQUENCE, size); pc->on_top += size - 1; for (Py_ssize_t i = 0; i < size; i++) { pc->on_top--; pattern_ty pattern = asdl_seq_GET(patterns, i); - RETURN_IF_FALSE(compiler_pattern_subpattern(c, ploc, pattern, pc)); + RETURN_IF_ERROR(compiler_pattern_subpattern(c, pattern, pc)); } // If we get this far, it's a match! Whatever happens next should consume // the tuple of keys and the subject: @@ -6766,31 +6801,30 @@ compiler_pattern_mapping(struct compiler *c, location *ploc, // rest = dict(TOS1) // for key in TOS: // del rest[key] - ADDOP_I(c, *ploc, BUILD_MAP, 0); // [subject, keys, empty] - ADDOP_I(c, *ploc, SWAP, 3); // [empty, keys, subject] - ADDOP_I(c, *ploc, DICT_UPDATE, 2); // [copy, keys] - ADDOP_I(c, *ploc, UNPACK_SEQUENCE, size); // [copy, keys...] + ADDOP_I(c, LOC(p), BUILD_MAP, 0); // [subject, keys, empty] + ADDOP_I(c, LOC(p), SWAP, 3); // [empty, keys, subject] + ADDOP_I(c, LOC(p), DICT_UPDATE, 2); // [copy, keys] + ADDOP_I(c, LOC(p), UNPACK_SEQUENCE, size); // [copy, keys...] while (size) { - ADDOP_I(c, *ploc, COPY, 1 + size--); // [copy, keys..., copy] - ADDOP_I(c, *ploc, SWAP, 2); // [copy, keys..., copy, key] - ADDOP(c, *ploc, DELETE_SUBSCR); // [copy, keys...] + ADDOP_I(c, LOC(p), COPY, 1 + size--); // [copy, keys..., copy] + ADDOP_I(c, LOC(p), SWAP, 2); // [copy, keys..., copy, key] + ADDOP(c, LOC(p), DELETE_SUBSCR); // [copy, keys...] 
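[Note on the location handling in these pattern hunks] Throughout compiler_pattern_mapping() and the other pattern helpers in this patch, the threaded location pointer (location *ploc) and the SET_LOC() calls are removed; each emit site now takes its location straight from the AST node via LOC(...). A rough sketch of the assumed shape of that macro, using the same location fields (lineno, end_lineno, col_offset, end_col_offset) that appear elsewhere in this diff; the real definition is in compile.c and is not shown here:

    /* Assumed shape only: build a location value directly from an AST node,
       so call sites can write LOC(p) instead of passing a location through. */
    #define LOC(node)                                          \
        ((location){ .lineno         = (node)->lineno,         \
                     .end_lineno     = (node)->end_lineno,     \
                     .col_offset     = (node)->col_offset,     \
                     .end_col_offset = (node)->end_col_offset })

The practical effect is that errors and jump targets are attributed to the specific sub-pattern being compiled rather than to whatever location the caller last stored.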
} - RETURN_IF_FALSE(pattern_helper_store_name(c, *ploc, star_target, pc)); + RETURN_IF_ERROR(pattern_helper_store_name(c, LOC(p), star_target, pc)); } else { - ADDOP(c, *ploc, POP_TOP); // Tuple of keys. - ADDOP(c, *ploc, POP_TOP); // Subject. + ADDOP(c, LOC(p), POP_TOP); // Tuple of keys. + ADDOP(c, LOC(p), POP_TOP); // Subject. } - return 1; + return SUCCESS; error: Py_DECREF(seen); - return 0; + return ERROR; } static int -compiler_pattern_or(struct compiler *c, location *ploc, - pattern_ty p, pattern_context *pc) +compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc) { assert(p->kind == MatchOr_kind); NEW_JUMP_TARGET_LABEL(c, end); @@ -6806,7 +6840,6 @@ compiler_pattern_or(struct compiler *c, location *ploc, // NOTE: We can't use returning macros anymore! goto error on error. for (Py_ssize_t i = 0; i < size; i++) { pattern_ty alt = asdl_seq_GET(p->v.MatchOr.patterns, i); - SET_LOC(c, alt); PyObject *pc_stores = PyList_New(0); if (pc_stores == NULL) { goto error; @@ -6817,9 +6850,8 @@ compiler_pattern_or(struct compiler *c, location *ploc, pc->fail_pop = NULL; pc->fail_pop_size = 0; pc->on_top = 0; - *ploc = LOC(alt); - if (!cfg_builder_addop_i(CFG_BUILDER(c), COPY, 1, *ploc) || - !compiler_pattern(c, ploc, alt, pc)) { + if (cfg_builder_addop_i(CFG_BUILDER(c), COPY, 1, LOC(alt)) < 0 || + compiler_pattern(c, alt, pc) < 0) { goto error; } // Success! @@ -6829,8 +6861,7 @@ compiler_pattern_or(struct compiler *c, location *ploc, // for the others (they can't bind a different set of names, and // might need to be reordered): assert(control == NULL); - control = pc->stores; - Py_INCREF(control); + control = Py_NewRef(pc->stores); } else if (nstores != PyList_GET_SIZE(control)) { goto diff; @@ -6874,7 +6905,7 @@ compiler_pattern_or(struct compiler *c, location *ploc, // Do the same thing to the stack, using several // rotations: while (rotations--) { - if (!pattern_helper_rotate(c, *ploc, icontrol + 1)){ + if (pattern_helper_rotate(c, LOC(alt), icontrol + 1) < 0) { goto error; } } @@ -6882,8 +6913,8 @@ compiler_pattern_or(struct compiler *c, location *ploc, } } assert(control); - if (!cfg_builder_addop_j(CFG_BUILDER(c), *ploc, JUMP, end) || - !emit_and_reset_fail_pop(c, *ploc, pc)) + if (cfg_builder_addop_j(CFG_BUILDER(c), LOC(alt), JUMP, end) < 0 || + emit_and_reset_fail_pop(c, LOC(alt), pc) < 0) { goto error; } @@ -6894,8 +6925,8 @@ compiler_pattern_or(struct compiler *c, location *ploc, // Need to NULL this for the PyObject_Free call in the error block. old_pc.fail_pop = NULL; // No match. 
Pop the remaining copy of the subject and fail: - if (!cfg_builder_addop_noarg(CFG_BUILDER(c), POP_TOP, *ploc) || - !jump_to_fail_pop(c, *ploc, pc, JUMP)) { + if (cfg_builder_addop_noarg(CFG_BUILDER(c), POP_TOP, LOC(p)) < 0 || + jump_to_fail_pop(c, LOC(p), pc, JUMP) < 0) { goto error; } @@ -6910,7 +6941,7 @@ compiler_pattern_or(struct compiler *c, location *ploc, Py_ssize_t nrots = nstores + 1 + pc->on_top + PyList_GET_SIZE(pc->stores); for (Py_ssize_t i = 0; i < nstores; i++) { // Rotate this capture to its proper place on the stack: - if (!pattern_helper_rotate(c, *ploc, nrots)) { + if (pattern_helper_rotate(c, LOC(p), nrots) < 0) { goto error; } // Update the list of previous stores with this new name, checking for @@ -6921,7 +6952,7 @@ compiler_pattern_or(struct compiler *c, location *ploc, goto error; } if (dupe) { - compiler_error_duplicate_store(c, *ploc, name); + compiler_error_duplicate_store(c, LOC(p), name); goto error; } if (PyList_Append(pc->stores, name)) { @@ -6932,21 +6963,21 @@ compiler_pattern_or(struct compiler *c, location *ploc, Py_DECREF(control); // NOTE: Returning macros are safe again. // Pop the copy of the subject: - ADDOP(c, *ploc, POP_TOP); - return 1; + ADDOP(c, LOC(p), POP_TOP); + return SUCCESS; diff: - compiler_error(c, *ploc, "alternative patterns bind different names"); + compiler_error(c, LOC(p), "alternative patterns bind different names"); error: PyObject_Free(old_pc.fail_pop); Py_DECREF(old_pc.stores); Py_XDECREF(control); - return 0; + return ERROR; } static int -compiler_pattern_sequence(struct compiler *c, location *ploc, - pattern_ty p, pattern_context *pc) +compiler_pattern_sequence(struct compiler *c, pattern_ty p, + pattern_context *pc) { assert(p->kind == MatchSequence_kind); asdl_pattern_seq *patterns = p->v.MatchSequence.patterns; @@ -6960,7 +6991,7 @@ compiler_pattern_sequence(struct compiler *c, location *ploc, if (pattern->kind == MatchStar_kind) { if (star >= 0) { const char *e = "multiple starred names in sequence pattern"; - return compiler_error(c, *ploc, e); + return compiler_error(c, LOC(p), e); } star_wildcard = WILDCARD_STAR_CHECK(pattern); only_wildcard &= star_wildcard; @@ -6971,92 +7002,87 @@ compiler_pattern_sequence(struct compiler *c, location *ploc, } // We need to keep the subject on top during the sequence and length checks: pc->on_top++; - ADDOP(c, *ploc, MATCH_SEQUENCE); - RETURN_IF_FALSE(jump_to_fail_pop(c, *ploc, pc, POP_JUMP_IF_FALSE)); + ADDOP(c, LOC(p), MATCH_SEQUENCE); + RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE)); if (star < 0) { // No star: len(subject) == size - ADDOP(c, *ploc, GET_LEN); - ADDOP_LOAD_CONST_NEW(c, *ploc, PyLong_FromSsize_t(size)); - ADDOP_COMPARE(c, *ploc, Eq); - RETURN_IF_FALSE(jump_to_fail_pop(c, *ploc, pc, POP_JUMP_IF_FALSE)); + ADDOP(c, LOC(p), GET_LEN); + ADDOP_LOAD_CONST_NEW(c, LOC(p), PyLong_FromSsize_t(size)); + ADDOP_COMPARE(c, LOC(p), Eq); + RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE)); } else if (size > 1) { // Star: len(subject) >= size - 1 - ADDOP(c, *ploc, GET_LEN); - ADDOP_LOAD_CONST_NEW(c, *ploc, PyLong_FromSsize_t(size - 1)); - ADDOP_COMPARE(c, *ploc, GtE); - RETURN_IF_FALSE(jump_to_fail_pop(c, *ploc, pc, POP_JUMP_IF_FALSE)); + ADDOP(c, LOC(p), GET_LEN); + ADDOP_LOAD_CONST_NEW(c, LOC(p), PyLong_FromSsize_t(size - 1)); + ADDOP_COMPARE(c, LOC(p), GtE); + RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE)); } // Whatever comes next should consume the subject: pc->on_top--; if (only_wildcard) { // Patterns like: [] / [_] 
/ [_, _] / [*_] / [_, *_] / [_, _, *_] / etc. - ADDOP(c, *ploc, POP_TOP); + ADDOP(c, LOC(p), POP_TOP); } else if (star_wildcard) { - RETURN_IF_FALSE(pattern_helper_sequence_subscr(c, ploc, patterns, star, pc)); + RETURN_IF_ERROR(pattern_helper_sequence_subscr(c, LOC(p), patterns, star, pc)); } else { - RETURN_IF_FALSE(pattern_helper_sequence_unpack(c, ploc, patterns, star, pc)); + RETURN_IF_ERROR(pattern_helper_sequence_unpack(c, LOC(p), patterns, star, pc)); } - return 1; + return SUCCESS; } static int compiler_pattern_value(struct compiler *c, pattern_ty p, pattern_context *pc) { assert(p->kind == MatchValue_kind); - location loc = LOC(p); expr_ty value = p->v.MatchValue.value; if (!MATCH_VALUE_EXPR(value)) { const char *e = "patterns may only match literals and attribute lookups"; - return compiler_error(c, loc, e); + return compiler_error(c, LOC(p), e); } VISIT(c, expr, value); - ADDOP_COMPARE(c, loc, Eq); - RETURN_IF_FALSE(jump_to_fail_pop(c, loc, pc, POP_JUMP_IF_FALSE)); - return 1; + ADDOP_COMPARE(c, LOC(p), Eq); + RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE)); + return SUCCESS; } static int compiler_pattern_singleton(struct compiler *c, pattern_ty p, pattern_context *pc) { assert(p->kind == MatchSingleton_kind); - location loc = LOC(p); - ADDOP_LOAD_CONST(c, loc, p->v.MatchSingleton.value); - ADDOP_COMPARE(c, loc, Is); - RETURN_IF_FALSE(jump_to_fail_pop(c, loc, pc, POP_JUMP_IF_FALSE)); - return 1; + ADDOP_LOAD_CONST(c, LOC(p), p->v.MatchSingleton.value); + ADDOP_COMPARE(c, LOC(p), Is); + RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE)); + return SUCCESS; } static int -compiler_pattern(struct compiler *c, location *ploc, - pattern_ty p, pattern_context *pc) +compiler_pattern(struct compiler *c, pattern_ty p, pattern_context *pc) { - SET_LOC(c, p); - *ploc = LOC(p); switch (p->kind) { case MatchValue_kind: return compiler_pattern_value(c, p, pc); case MatchSingleton_kind: return compiler_pattern_singleton(c, p, pc); case MatchSequence_kind: - return compiler_pattern_sequence(c, ploc, p, pc); + return compiler_pattern_sequence(c, p, pc); case MatchMapping_kind: - return compiler_pattern_mapping(c, ploc, p, pc); + return compiler_pattern_mapping(c, p, pc); case MatchClass_kind: - return compiler_pattern_class(c, ploc, p, pc); + return compiler_pattern_class(c, p, pc); case MatchStar_kind: return compiler_pattern_star(c, p, pc); case MatchAs_kind: - return compiler_pattern_as(c, ploc, p, pc); + return compiler_pattern_as(c, p, pc); case MatchOr_kind: - return compiler_pattern_or(c, ploc, p, pc); + return compiler_pattern_or(c, p, pc); } // AST validator shouldn't let this happen, but if it does, // just fail, don't crash out of the interpreter const char *e = "invalid match pattern node in AST (kind=%d)"; - return compiler_error(c, *ploc, e, p->kind); + return compiler_error(c, LOC(p), e, p->kind); } static int @@ -7070,72 +7096,70 @@ compiler_match_inner(struct compiler *c, stmt_ty s, pattern_context *pc) int has_default = WILDCARD_CHECK(m->pattern) && 1 < cases; for (Py_ssize_t i = 0; i < cases - has_default; i++) { m = asdl_seq_GET(s->v.Match.cases, i); - SET_LOC(c, m->pattern); // Only copy the subject if we're *not* on the last case: - location loc = LOC(m->pattern); if (i != cases - has_default - 1) { - ADDOP_I(c, loc, COPY, 1); + ADDOP_I(c, LOC(m->pattern), COPY, 1); + } + pc->stores = PyList_New(0); + if (pc->stores == NULL) { + return ERROR; } - RETURN_IF_FALSE(pc->stores = PyList_New(0)); // Irrefutable cases must be either guarded, 
last, or both: pc->allow_irrefutable = m->guard != NULL || i == cases - 1; pc->fail_pop = NULL; pc->fail_pop_size = 0; pc->on_top = 0; // NOTE: Can't use returning macros here (they'll leak pc->stores)! - if (!compiler_pattern(c, &loc, m->pattern, pc)) { + if (compiler_pattern(c, m->pattern, pc) < 0) { Py_DECREF(pc->stores); - return 0; + return ERROR; } assert(!pc->on_top); // It's a match! Store all of the captured names (they're on the stack). Py_ssize_t nstores = PyList_GET_SIZE(pc->stores); for (Py_ssize_t n = 0; n < nstores; n++) { PyObject *name = PyList_GET_ITEM(pc->stores, n); - if (!compiler_nameop(c, loc, name, Store)) { + if (compiler_nameop(c, LOC(m->pattern), name, Store) < 0) { Py_DECREF(pc->stores); - return 0; + return ERROR; } } Py_DECREF(pc->stores); // NOTE: Returning macros are safe again. if (m->guard) { - RETURN_IF_FALSE(ensure_fail_pop(c, pc, 0)); - RETURN_IF_FALSE(compiler_jump_if(c, loc, m->guard, pc->fail_pop[0], 0)); + RETURN_IF_ERROR(ensure_fail_pop(c, pc, 0)); + RETURN_IF_ERROR(compiler_jump_if(c, LOC(m->pattern), m->guard, pc->fail_pop[0], 0)); } // Success! Pop the subject off, we're done with it: if (i != cases - has_default - 1) { - ADDOP(c, loc, POP_TOP); + ADDOP(c, LOC(m->pattern), POP_TOP); } VISIT_SEQ(c, stmt, m->body); ADDOP_JUMP(c, NO_LOCATION, JUMP, end); // If the pattern fails to match, we want the line number of the // cleanup to be associated with the failed pattern, not the last line // of the body - SET_LOC(c, m->pattern); - RETURN_IF_FALSE(emit_and_reset_fail_pop(c, LOC(m->pattern), pc)); + RETURN_IF_ERROR(emit_and_reset_fail_pop(c, LOC(m->pattern), pc)); } if (has_default) { // A trailing "case _" is common, and lets us save a bit of redundant // pushing and popping in the loop above: m = asdl_seq_GET(s->v.Match.cases, cases - 1); - SET_LOC(c, m->pattern); - location loc = LOC(m->pattern); if (cases == 1) { // No matches. 
Done with the subject: - ADDOP(c, loc, POP_TOP); + ADDOP(c, LOC(m->pattern), POP_TOP); } else { // Show line coverage for default case (it doesn't create bytecode) - ADDOP(c, loc, NOP); + ADDOP(c, LOC(m->pattern), NOP); } if (m->guard) { - RETURN_IF_FALSE(compiler_jump_if(c, loc, m->guard, end, 0)); + RETURN_IF_ERROR(compiler_jump_if(c, LOC(m->pattern), m->guard, end, 0)); } VISIT_SEQ(c, stmt, m->body); } USE_LABEL(c, end); - return 1; + return SUCCESS; } static int @@ -7284,12 +7308,12 @@ assemble_init(struct assembler *a, int firstlineno) if (a->a_except_table == NULL) { goto error; } - return 1; + return 0; error: Py_XDECREF(a->a_bytecode); Py_XDECREF(a->a_linetable); Py_XDECREF(a->a_except_table); - return 0; + return -1; } static void @@ -7569,7 +7593,8 @@ push_cold_blocks_to_end(cfg_builder *g, int code_flags) { if (explicit_jump == NULL) { return -1; } - basicblock_addop(explicit_jump, JUMP, b->b_next->b_label, NO_LOCATION); + basicblock_addop(explicit_jump, JUMP, b->b_next->b_label, NO_LOCATION, + UNUSED_OPARG, UNUSED_OPARG, UNUSED_OPARG); explicit_jump->b_cold = 1; explicit_jump->b_next = b->b_next; b->b_next = explicit_jump; @@ -7634,7 +7659,7 @@ convert_exception_handlers_to_nops(basicblock *entryblock) { for (int i = 0; i < b->b_iused; i++) { struct instr *instr = &b->b_instr[i]; if (is_block_push(instr) || instr->i_opcode == POP_BLOCK) { - instr->i_opcode = NOP; + INSTR_SET_OP0(instr, NOP); } } } @@ -7683,8 +7708,9 @@ assemble_emit_exception_table_entry(struct assembler *a, int start, int end, bas { Py_ssize_t len = PyBytes_GET_SIZE(a->a_except_table); if (a->a_except_table_off + MAX_SIZE_OF_ENTRY >= len) { - if (_PyBytes_Resize(&a->a_except_table, len * 2) < 0) - return 0; + if (_PyBytes_Resize(&a->a_except_table, len * 2) < 0) { + return -1; + } } int size = end-start; assert(end > start); @@ -7699,7 +7725,7 @@ assemble_emit_exception_table_entry(struct assembler *a, int start, int end, bas assemble_emit_exception_table_item(a, size, 0); assemble_emit_exception_table_item(a, target, 0); assemble_emit_exception_table_item(a, depth_lasti, 0); - return 1; + return 0; } static int @@ -7715,7 +7741,9 @@ assemble_exception_table(struct assembler *a, basicblock *entryblock) struct instr *instr = &b->b_instr[i]; if (instr->i_except != handler) { if (handler != NULL) { - RETURN_IF_FALSE(assemble_emit_exception_table_entry(a, start, ioffset, handler)); + if (assemble_emit_exception_table_entry(a, start, ioffset, handler) < 0) { + return -1; + } } start = ioffset; handler = instr->i_except; @@ -7724,9 +7752,11 @@ assemble_exception_table(struct assembler *a, basicblock *entryblock) } } if (handler != NULL) { - RETURN_IF_FALSE(assemble_emit_exception_table_entry(a, start, ioffset, handler)); + if (assemble_emit_exception_table_entry(a, start, ioffset, handler) < 0) { + return -1; + } } - return 1; + return 0; } /* Code location emitting code. See locations.md for a description of the format. 
*/ @@ -7829,12 +7859,12 @@ write_location_info_entry(struct assembler* a, struct instr* i, int isize) if (a->a_location_off + THEORETICAL_MAX_ENTRY_SIZE >= len) { assert(len > THEORETICAL_MAX_ENTRY_SIZE); if (_PyBytes_Resize(&a->a_linetable, len*2) < 0) { - return 0; + return -1; } } if (i->i_loc.lineno < 0) { write_location_info_none(a, isize); - return 1; + return 0; } int line_delta = i->i_loc.lineno - a->a_lineno; int column = i->i_loc.col_offset; @@ -7845,23 +7875,23 @@ write_location_info_entry(struct assembler* a, struct instr* i, int isize) if (i->i_loc.end_lineno == i->i_loc.lineno || i->i_loc.end_lineno == -1) { write_location_info_no_column(a, isize, line_delta); a->a_lineno = i->i_loc.lineno; - return 1; + return 0; } } else if (i->i_loc.end_lineno == i->i_loc.lineno) { if (line_delta == 0 && column < 80 && end_column - column < 16 && end_column >= column) { write_location_info_short_form(a, isize, column, end_column); - return 1; + return 0; } if (line_delta >= 0 && line_delta < 3 && column < 128 && end_column < 128) { write_location_info_oneline_form(a, isize, line_delta, column, end_column); a->a_lineno = i->i_loc.lineno; - return 1; + return 0; } } write_location_info_long_form(a, i, isize); a->a_lineno = i->i_loc.lineno; - return 1; + return 0; } static int @@ -7869,8 +7899,8 @@ assemble_emit_location(struct assembler* a, struct instr* i) { int isize = instr_size(i); while (isize > 8) { - if (!write_location_info_entry(a, i, 8)) { - return 0; + if (write_location_info_entry(a, i, 8) < 0) { + return -1; } isize -= 8; } @@ -7890,15 +7920,17 @@ assemble_emit(struct assembler *a, struct instr *i) int size = instr_size(i); if (a->a_offset + size >= len / (int)sizeof(_Py_CODEUNIT)) { - if (len > PY_SSIZE_T_MAX / 2) - return 0; - if (_PyBytes_Resize(&a->a_bytecode, len * 2) < 0) - return 0; + if (len > PY_SSIZE_T_MAX / 2) { + return -1; + } + if (_PyBytes_Resize(&a->a_bytecode, len * 2) < 0) { + return -1; + } } code = (_Py_CODEUNIT *)PyBytes_AS_STRING(a->a_bytecode) + a->a_offset; a->a_offset += size; write_instr(code, i, size); - return 1; + return 0; } static int @@ -7961,7 +7993,8 @@ normalize_jumps_in_block(cfg_builder *g, basicblock *b) { if (backwards_jump == NULL) { return -1; } - basicblock_addop(backwards_jump, JUMP, target->b_label, NO_LOCATION); + basicblock_addop(backwards_jump, JUMP, target->b_label, NO_LOCATION, + UNUSED_OPARG, UNUSED_OPARG, UNUSED_OPARG); backwards_jump->b_instr[0].i_target = target; last->i_opcode = reversed_opcode; last->i_target = b->b_next; @@ -8080,7 +8113,6 @@ scan_block_for_locals(basicblock *b, basicblock ***sp) for (int i = 0; i < b->b_iused; i++) { struct instr *instr = &b->b_instr[i]; assert(instr->i_opcode != EXTENDED_ARG); - assert(instr->i_opcode != EXTENDED_ARG_QUICK); assert(!IS_SUPERINSTRUCTION_OPCODE(instr->i_opcode)); if (instr->i_except != NULL) { maybe_push(instr->i_except, unsafe_mask, sp); @@ -8137,7 +8169,6 @@ fast_scan_many_locals(basicblock *entryblock, int nlocals) for (int i = 0; i < b->b_iused; i++) { struct instr *instr = &b->b_instr[i]; assert(instr->i_opcode != EXTENDED_ARG); - assert(instr->i_opcode != EXTENDED_ARG_QUICK); assert(!IS_SUPERINSTRUCTION_OPCODE(instr->i_opcode)); int arg = instr->i_oparg; if (arg < 64) { @@ -8228,10 +8259,9 @@ dict_keys_inorder(PyObject *dict, Py_ssize_t offset) return NULL; while (PyDict_Next(dict, &pos, &k, &v)) { i = PyLong_AS_LONG(v); - Py_INCREF(k); assert((i - offset) < size); assert((i - offset) >= 0); - PyTuple_SET_ITEM(tuple, i - offset, k); + PyTuple_SET_ITEM(tuple, i - 
offset, Py_NewRef(k)); } return tuple; } @@ -8253,10 +8283,9 @@ consts_dict_keys_inorder(PyObject *dict) if (PyTuple_CheckExact(k)) { k = PyTuple_GET_ITEM(k, 1); } - Py_INCREF(k); assert(i < size); assert(i >= 0); - PyList_SET_ITEM(consts, i, k); + PyList_SET_ITEM(consts, i, Py_NewRef(k)); } return consts; } @@ -8283,7 +8312,7 @@ compute_code_flags(struct compiler *c) } /* (Only) inherit compilerflags in PyCF_MASK */ - flags |= (c->c_flags->cf_flags & PyCF_MASK); + flags |= (c->c_flags.cf_flags & PyCF_MASK); if ((IS_TOP_LEVEL_AWAIT(c)) && ste->ste_coroutine && @@ -8302,17 +8331,17 @@ merge_const_one(PyObject *const_cache, PyObject **obj) PyDict_CheckExact(const_cache); PyObject *key = _PyCode_ConstantKey(*obj); if (key == NULL) { - return 0; + return -1; } // t is borrowed reference PyObject *t = PyDict_SetDefault(const_cache, key, key); Py_DECREF(key); if (t == NULL) { - return 0; + return -1; } if (t == key) { // obj is new constant. - return 1; + return 0; } if (PyTuple_CheckExact(t)) { @@ -8320,10 +8349,8 @@ merge_const_one(PyObject *const_cache, PyObject **obj) t = PyTuple_GET_ITEM(t, 1); } - Py_INCREF(t); - Py_DECREF(*obj); - *obj = t; - return 1; + Py_SETREF(*obj, Py_NewRef(t)); + return 0; } // This is in codeobject.c. @@ -8389,7 +8416,7 @@ makecode(struct compiler *c, struct assembler *a, PyObject *constslist, if (!names) { goto error; } - if (!merge_const_one(c->c_const_cache, &names)) { + if (merge_const_one(c->c_const_cache, &names) < 0) { goto error; } @@ -8397,7 +8424,7 @@ makecode(struct compiler *c, struct assembler *a, PyObject *constslist, if (consts == NULL) { goto error; } - if (!merge_const_one(c->c_const_cache, &consts)) { + if (merge_const_one(c->c_const_cache, &consts) < 0) { goto error; } @@ -8409,6 +8436,24 @@ makecode(struct compiler *c, struct assembler *a, PyObject *constslist, assert(INT_MAX - posonlyargcount - posorkwargcount > 0); int kwonlyargcount = (int)c->u->u_kwonlyargcount; + for(int i=0; i < c->u->u_ntmps; i++) { + PyObject *k = PyUnicode_FromFormat("$%d", i); + if (!k) { + goto error; + } + PyObject *v = PyLong_FromSsize_t(nlocalsplus++); + if (!v) { + Py_DECREF(k); + goto error; + } + int ret = PyDict_SetItem(c->u->u_varnames, k, v); + Py_DECREF(k); + Py_DECREF(v); + if (ret < 0) { + goto error; + } + } + localsplusnames = PyTuple_New(nlocalsplus); if (localsplusnames == NULL) { goto error; @@ -8448,7 +8493,7 @@ makecode(struct compiler *c, struct assembler *a, PyObject *constslist, goto error; } - if (!merge_const_one(c->c_const_cache, &localsplusnames)) { + if (merge_const_one(c->c_const_cache, &localsplusnames) < 0) { goto error; } con.localsplusnames = localsplusnames; @@ -8513,7 +8558,7 @@ static int optimize_cfg(cfg_builder *g, PyObject *consts, PyObject *const_cache); static int -trim_unused_consts(basicblock *entryblock, PyObject *consts); +remove_unused_consts(basicblock *entryblock, PyObject *consts); /* Duplicates exit BBs, so that line numbers can be propagated to them */ static int @@ -8552,18 +8597,6 @@ build_cellfixedoffsets(struct compiler *c) return fixed; } -static inline int -insert_instruction(basicblock *block, int pos, struct instr *instr) { - if (basicblock_next_instr(block) < 0) { - return -1; - } - for (int i = block->b_iused - 1; i > pos; i--) { - block->b_instr[i] = block->b_instr[i-1]; - } - block->b_instr[pos] = *instr; - return 0; -} - static int insert_prefix_instructions(struct compiler *c, basicblock *entryblock, int *fixed, int nfreevars, int code_flags) @@ -8697,7 +8730,6 @@ fix_cell_offsets(struct compiler *c, 
basicblock *entryblock, int *fixedmap) struct instr *inst = &b->b_instr[i]; // This is called before extended args are generated. assert(inst->i_opcode != EXTENDED_ARG); - assert(inst->i_opcode != EXTENDED_ARG_QUICK); int oldoffset = inst->i_oparg; switch(inst->i_opcode) { case MAKE_CELL: @@ -8721,6 +8753,17 @@ static void propagate_line_numbers(basicblock *entryblock); #ifndef NDEBUG + +static bool +no_redundant_nops(cfg_builder *g) { + for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { + if (remove_redundant_nops(b) != 0) { + return false; + } + } + return true; +} + static bool no_redundant_jumps(cfg_builder *g) { for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { @@ -8767,7 +8810,109 @@ remove_redundant_jumps(cfg_builder *g) { } if (last->i_target == b->b_next) { assert(b->b_next->b_iused); - last->i_opcode = NOP; + INSTR_SET_OP0(last, NOP); + } + } + } + return 0; +} + +static int +prepare_localsplus(struct compiler* c, int code_flags) +{ + assert(PyDict_GET_SIZE(c->u->u_varnames) < INT_MAX); + assert(PyDict_GET_SIZE(c->u->u_cellvars) < INT_MAX); + assert(PyDict_GET_SIZE(c->u->u_freevars) < INT_MAX); + int nlocals = (int)PyDict_GET_SIZE(c->u->u_varnames); + int ncellvars = (int)PyDict_GET_SIZE(c->u->u_cellvars); + int nfreevars = (int)PyDict_GET_SIZE(c->u->u_freevars); + assert(INT_MAX - nlocals - ncellvars > 0); + assert(INT_MAX - nlocals - ncellvars - nfreevars > 0); + int nlocalsplus = nlocals + ncellvars + nfreevars; + int* cellfixedoffsets = build_cellfixedoffsets(c); + if (cellfixedoffsets == NULL) { + return -1; + } + + cfg_builder* g = CFG_BUILDER(c); + + // This must be called before fix_cell_offsets(). + if (insert_prefix_instructions(c, g->g_entryblock, cellfixedoffsets, nfreevars, code_flags)) { + PyMem_Free(cellfixedoffsets); + return -1; + } + + int numdropped = fix_cell_offsets(c, g->g_entryblock, cellfixedoffsets); + PyMem_Free(cellfixedoffsets); // At this point we're done with it. + cellfixedoffsets = NULL; + if (numdropped < 0) { + return -1; + } + nlocalsplus -= numdropped; + return nlocalsplus; +} + +static int +add_return_at_end_of_block(struct compiler *c, int addNone) +{ + /* Make sure every block that falls off the end returns None. 
*/ + if (!basicblock_returns(CFG_BUILDER(c)->g_curblock)) { + if (addNone) { + ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None); + } + ADDOP(c, NO_LOCATION, RETURN_VALUE); + } + return SUCCESS; +} + +static int +resolve_register(oparg_t *oparg, int nlocalsplus, + int ntmps, int stacksize, Py_ssize_t nconsts) +{ + switch(oparg->type) { + case UNUSED_ARG: + oparg->final = 0; + break; + case EXPLICIT_ARG: + oparg->final = oparg->value; + break; + case CONST_REG: + assert(oparg->value >= 0 && oparg->value < nconsts); + oparg->final = nlocalsplus + ntmps + stacksize + oparg->value; + break; + case NAME_REG: + assert(oparg->value >= 0 && oparg->value < nlocalsplus); + oparg->final = oparg->value; + break; + case TMP_REG: { + assert(oparg->value >= 0 && oparg->value < ntmps); + oparg->final = nlocalsplus + oparg->value; + break; + } + default: + Py_UNREACHABLE(); + } + return 1; +} + +static int +resolve_registers(cfg_builder *g, int nlocalsplus, int ntmps, int stacksize, + Py_ssize_t nconsts) +{ + for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { + for (int i = 0; i < b->b_iused; i++) { + struct instr *inst = &b->b_instr[i]; + if (resolve_register(&inst->i_oparg1, nlocalsplus, ntmps, + stacksize, nconsts) < 0) { + return -1; + } + if (resolve_register(&inst->i_oparg2, nlocalsplus, ntmps, + stacksize, nconsts) < 0) { + return -1; + } + if (resolve_register(&inst->i_oparg3, nlocalsplus, ntmps, + stacksize, nconsts) < 0) { + return -1; } } } @@ -8787,27 +8932,8 @@ assemble(struct compiler *c, int addNone) return NULL; } - /* Make sure every block that falls off the end returns None. */ - if (!basicblock_returns(CFG_BUILDER(c)->g_curblock)) { - UNSET_LOC(c); - if (addNone) { - ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None); - } - ADDOP(c, NO_LOCATION, RETURN_VALUE); - } - - assert(PyDict_GET_SIZE(c->u->u_varnames) < INT_MAX); - assert(PyDict_GET_SIZE(c->u->u_cellvars) < INT_MAX); - assert(PyDict_GET_SIZE(c->u->u_freevars) < INT_MAX); - int nlocals = (int)PyDict_GET_SIZE(c->u->u_varnames); - int ncellvars = (int)PyDict_GET_SIZE(c->u->u_cellvars); - int nfreevars = (int)PyDict_GET_SIZE(c->u->u_freevars); - assert(INT_MAX - nlocals - ncellvars > 0); - assert(INT_MAX - nlocals - ncellvars - nfreevars > 0); - int nlocalsplus = nlocals + ncellvars + nfreevars; - int *cellfixedoffsets = build_cellfixedoffsets(c); - if (cellfixedoffsets == NULL) { - goto error; + if (add_return_at_end_of_block(c, addNone) < 0) { + return NULL; } int nblocks = 0; @@ -8832,19 +8958,6 @@ assemble(struct compiler *c, int addNone) } } - // This must be called before fix_cell_offsets(). - if (insert_prefix_instructions(c, g->g_entryblock, cellfixedoffsets, nfreevars, code_flags)) { - goto error; - } - - int numdropped = fix_cell_offsets(c, g->g_entryblock, cellfixedoffsets); - PyMem_Free(cellfixedoffsets); // At this point we're done with it. 
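[Note on the register resolution pass added above] resolve_register()/resolve_registers() map every symbolic operand onto a flat register index in the frame: locals, cells and free variables first, then the compiler temporaries that makecode() names $0, $1, ..., then the evaluation stack, then the constants. A standalone restatement of that arithmetic (a hypothetical helper for illustration only; the enum simply mirrors the oparg types used by the code above):

    /* Frame register layout implied by resolve_register():
     *   [0, nlocalsplus)                         locals, cells, free variables
     *   [nlocalsplus, +ntmps)                    compiler temporaries ($0, $1, ...)
     *   [nlocalsplus+ntmps, +stacksize)          evaluation stack slots
     *   [nlocalsplus+ntmps+stacksize, +nconsts)  constants
     */
    typedef enum { UNUSED_ARG, EXPLICIT_ARG, CONST_REG, NAME_REG, TMP_REG } reg_kind;

    static int
    flat_register_index(reg_kind kind, int value,
                        int nlocalsplus, int ntmps, int stacksize)
    {
        switch (kind) {
            case NAME_REG:     return value;                                   /* a local/cell/free slot */
            case TMP_REG:      return nlocalsplus + value;                     /* temporaries after the names */
            case CONST_REG:    return nlocalsplus + ntmps + stacksize + value; /* constants after the stack */
            case EXPLICIT_ARG: return value;                                   /* plain oparg, left as is */
            default:           return 0;                                       /* UNUSED_ARG */
        }
    }

For example, with nlocalsplus=3, ntmps=2 and stacksize=4, constant #1 resolves to register 10 and temporary $0 resolves to register 3.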
- cellfixedoffsets = NULL; - if (numdropped < 0) { - goto error; - } - nlocalsplus -= numdropped; - /** Preprocessing **/ /* Map labels to targets and mark exception handlers */ if (translate_jump_labels_to_targets(g->g_entryblock)) { @@ -8868,6 +8981,9 @@ assemble(struct compiler *c, int addNone) if (add_checks_for_loads_of_uninitialized_variables(g->g_entryblock, c) < 0) { goto error; } + if (remove_unused_consts(g->g_entryblock, consts)) { + goto error; + } /** line numbers (TODO: move this before optimization stage) */ if (duplicate_exits_without_lineno(g) < 0) { @@ -8881,6 +8997,12 @@ assemble(struct compiler *c, int addNone) } /** Assembly **/ + + int nlocalsplus = prepare_localsplus(c, code_flags); + if (nlocalsplus < 0) { + goto error; + } + int maxdepth = stackdepth(g->g_entryblock, code_flags); if (maxdepth < 0) { goto error; @@ -8896,53 +9018,60 @@ assemble(struct compiler *c, int addNone) assert(no_redundant_jumps(g)); - /* Can't modify the bytecode after computing jump offsets. */ - assemble_jump_offsets(g->g_entryblock); - - if (trim_unused_consts(g->g_entryblock, consts)) { + Py_ssize_t nconsts = PyList_GET_SIZE(consts); + int ntmps = c->u->u_ntmps; + if (resolve_registers(g, nlocalsplus, ntmps, maxdepth, nconsts) < 0) { goto error; } + /* Can't modify the bytecode after computing jump offsets. */ + assemble_jump_offsets(g->g_entryblock); + /* Create assembler */ - if (!assemble_init(&a, c->u->u_firstlineno)) + if (assemble_init(&a, c->u->u_firstlineno) < 0) { goto error; + } /* Emit code. */ for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - for (int j = 0; j < b->b_iused; j++) - if (!assemble_emit(&a, &b->b_instr[j])) + for (int j = 0; j < b->b_iused; j++) { + if (assemble_emit(&a, &b->b_instr[j]) < 0) { goto error; + } + } } /* Emit location info */ a.a_lineno = c->u->u_firstlineno; for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - for (int j = 0; j < b->b_iused; j++) - if (!assemble_emit_location(&a, &b->b_instr[j])) + for (int j = 0; j < b->b_iused; j++) { + if (assemble_emit_location(&a, &b->b_instr[j]) < 0) { goto error; + } + } } - if (!assemble_exception_table(&a, g->g_entryblock)) { + if (assemble_exception_table(&a, g->g_entryblock) < 0) { goto error; } if (_PyBytes_Resize(&a.a_except_table, a.a_except_table_off) < 0) { goto error; } - if (!merge_const_one(c->c_const_cache, &a.a_except_table)) { + if (merge_const_one(c->c_const_cache, &a.a_except_table) < 0) { goto error; } if (_PyBytes_Resize(&a.a_linetable, a.a_location_off) < 0) { goto error; } - if (!merge_const_one(c->c_const_cache, &a.a_linetable)) { + if (merge_const_one(c->c_const_cache, &a.a_linetable) < 0) { goto error; } if (_PyBytes_Resize(&a.a_bytecode, a.a_offset * sizeof(_Py_CODEUNIT)) < 0) { goto error; } - if (!merge_const_one(c->c_const_cache, &a.a_bytecode)) { + if (merge_const_one(c->c_const_cache, &a.a_bytecode) < 0) { goto error; } @@ -8950,9 +9079,6 @@ assemble(struct compiler *c, int addNone) error: Py_XDECREF(consts); assemble_free(&a); - if (cellfixedoffsets != NULL) { - PyMem_Free(cellfixedoffsets); - } return co; } @@ -8970,8 +9096,7 @@ get_const_value(int opcode, int oparg, PyObject *co_consts) "Internal error: failed to get value of a constant"); return NULL; } - Py_INCREF(constant); - return constant; + return Py_NewRef(constant); } /* Replace LOAD_CONST c1, LOAD_CONST c2 ... 
LOAD_CONST cn, BUILD_TUPLE n @@ -9011,7 +9136,7 @@ fold_tuple_on_constants(PyObject *const_cache, } PyTuple_SET_ITEM(newconst, i, constant); } - if (merge_const_one(const_cache, &newconst) == 0) { + if (merge_const_one(const_cache, &newconst) < 0) { Py_DECREF(newconst); return -1; } @@ -9035,10 +9160,9 @@ fold_tuple_on_constants(PyObject *const_cache, } Py_DECREF(newconst); for (int i = 0; i < n; i++) { - inst[i].i_opcode = NOP; + INSTR_SET_OP0(&inst[i], NOP); } - inst[n].i_opcode = LOAD_CONST; - inst[n].i_oparg = (int)index; + INSTR_SET_OP1(&inst[n], LOAD_CONST, (int)index); return 0; } @@ -9135,7 +9259,7 @@ swaptimize(basicblock *block, int *ix) } // NOP out any unused instructions: while (0 <= current) { - instructions[current--].i_opcode = NOP; + INSTR_SET_OP0(&instructions[current--], NOP); } PyMem_Free(stack); *ix += len - 1; @@ -9201,7 +9325,7 @@ apply_static_swaps(basicblock *block, int i) } } // Success! - swap->i_opcode = NOP; + INSTR_SET_OP0(swap, NOP); struct instr temp = block->b_instr[j]; block->b_instr[j] = block->b_instr[k]; block->b_instr[k] = temp; @@ -9238,7 +9362,7 @@ optimize_basic_block(PyObject *const_cache, basicblock *bb, PyObject *consts) assert(PyDict_CheckExact(const_cache)); assert(PyList_CheckExact(consts)); struct instr nop; - nop.i_opcode = NOP; + INSTR_SET_OP0(&nop, NOP); struct instr *target; for (int i = 0; i < bb->b_iused; i++) { struct instr *inst = &bb->b_instr[i]; @@ -9272,13 +9396,13 @@ optimize_basic_block(PyObject *const_cache, basicblock *bb, PyObject *consts) if (is_true == -1) { goto error; } - inst->i_opcode = NOP; + INSTR_SET_OP0(inst, NOP); jump_if_true = nextop == POP_JUMP_IF_TRUE; if (is_true == jump_if_true) { bb->b_instr[i+1].i_opcode = JUMP; } else { - bb->b_instr[i+1].i_opcode = NOP; + INSTR_SET_OP0(&bb->b_instr[i + 1], NOP); } break; case JUMP_IF_FALSE_OR_POP: @@ -9297,8 +9421,8 @@ optimize_basic_block(PyObject *const_cache, basicblock *bb, PyObject *consts) bb->b_instr[i+1].i_opcode = JUMP; } else { - inst->i_opcode = NOP; - bb->b_instr[i+1].i_opcode = NOP; + INSTR_SET_OP0(inst, NOP); + INSTR_SET_OP0(&bb->b_instr[i + 1], NOP); } break; case IS_OP: @@ -9309,8 +9433,8 @@ optimize_basic_block(PyObject *const_cache, basicblock *bb, PyObject *consts) int jump_op = i+2 < bb->b_iused ? bb->b_instr[i+2].i_opcode : 0; if (Py_IsNone(cnt) && (jump_op == POP_JUMP_IF_FALSE || jump_op == POP_JUMP_IF_TRUE)) { unsigned char nextarg = bb->b_instr[i+1].i_oparg; - inst->i_opcode = NOP; - bb->b_instr[i+1].i_opcode = NOP; + INSTR_SET_OP0(inst, NOP); + INSTR_SET_OP0(&bb->b_instr[i + 1], NOP); bb->b_instr[i+2].i_opcode = nextarg ^ (jump_op == POP_JUMP_IF_FALSE) ? 
POP_JUMP_IF_NOT_NONE : POP_JUMP_IF_NONE; } @@ -9328,12 +9452,12 @@ optimize_basic_block(PyObject *const_cache, basicblock *bb, PyObject *consts) if (nextop == UNPACK_SEQUENCE && oparg == bb->b_instr[i+1].i_oparg) { switch(oparg) { case 1: - inst->i_opcode = NOP; - bb->b_instr[i+1].i_opcode = NOP; + INSTR_SET_OP0(inst, NOP); + INSTR_SET_OP0(&bb->b_instr[i + 1], NOP); continue; case 2: case 3: - inst->i_opcode = NOP; + INSTR_SET_OP0(inst, NOP); bb->b_instr[i+1].i_opcode = SWAP; continue; } @@ -9442,7 +9566,7 @@ optimize_basic_block(PyObject *const_cache, basicblock *bb, PyObject *consts) break; case SWAP: if (oparg == 1) { - inst->i_opcode = NOP; + INSTR_SET_OP0(inst, NOP); break; } if (swaptimize(bb, &i)) { @@ -9454,8 +9578,7 @@ optimize_basic_block(PyObject *const_cache, basicblock *bb, PyObject *consts) break; case PUSH_NULL: if (nextop == LOAD_GLOBAL && (inst[1].i_opcode & 1) == 0) { - inst->i_opcode = NOP; - inst->i_oparg = 0; + INSTR_SET_OP0(inst, NOP); inst[1].i_oparg |= 1; } break; @@ -9484,7 +9607,7 @@ inline_small_exit_blocks(basicblock *bb) { } basicblock *target = last->i_target; if (basicblock_exits_scope(target) && target->b_iused <= MAX_COPY_SIZE) { - last->i_opcode = NOP; + INSTR_SET_OP0(last, NOP); if (basicblock_append_instructions(bb, target) < 0) { return -1; } @@ -9493,7 +9616,7 @@ inline_small_exit_blocks(basicblock *bb) { return 0; } -static void +static int remove_redundant_nops(basicblock *bb) { /* Remove NOPs when legal to do so. */ int dest = 0; @@ -9541,7 +9664,9 @@ remove_redundant_nops(basicblock *bb) { prev_lineno = lineno; } assert(dest <= bb->b_iused); + int num_removed = bb->b_iused - dest; bb->b_iused = dest; + return num_removed; } static int @@ -9752,55 +9877,130 @@ optimize_cfg(cfg_builder *g, PyObject *consts, PyObject *const_cache) b->b_iused = 0; } } - eliminate_empty_basic_blocks(g); for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { remove_redundant_nops(b); } + eliminate_empty_basic_blocks(g); + assert(no_redundant_nops(g)); if (remove_redundant_jumps(g) < 0) { return -1; } return 0; } -// Remove trailing unused constants. + static int -trim_unused_consts(basicblock *entryblock, PyObject *consts) +remove_unused_consts(basicblock *entryblock, PyObject *consts) { assert(PyList_CheckExact(consts)); + Py_ssize_t nconsts = PyList_GET_SIZE(consts); + if (nconsts == 0) { + return 0; /* nothing to do */ + } + Py_ssize_t *index_map = NULL; + Py_ssize_t *reverse_index_map = NULL; + int err = 1; + + index_map = PyMem_Malloc(nconsts * sizeof(Py_ssize_t)); + if (index_map == NULL) { + goto end; + } + for (Py_ssize_t i = 1; i < nconsts; i++) { + index_map[i] = -1; + } // The first constant may be docstring; keep it always. 
- int max_const_index = 0; + index_map[0] = 0; + + /* mark used consts */ for (basicblock *b = entryblock; b != NULL; b = b->b_next) { for (int i = 0; i < b->b_iused; i++) { - if ((b->b_instr[i].i_opcode == LOAD_CONST || - b->b_instr[i].i_opcode == KW_NAMES) && - b->b_instr[i].i_oparg > max_const_index) { - max_const_index = b->b_instr[i].i_oparg; + if (b->b_instr[i].i_opcode == LOAD_CONST || + b->b_instr[i].i_opcode == KW_NAMES) { + + int index = b->b_instr[i].i_oparg; + index_map[index] = index; } } } - if (max_const_index+1 < PyList_GET_SIZE(consts)) { - //fprintf(stderr, "removing trailing consts: max=%d, size=%d\n", - // max_const_index, (int)PyList_GET_SIZE(consts)); - if (PyList_SetSlice(consts, max_const_index+1, - PyList_GET_SIZE(consts), NULL) < 0) { - return 1; + /* now index_map[i] == i if consts[i] is used, -1 otherwise */ + + /* condense consts */ + Py_ssize_t n_used_consts = 0; + for (int i = 0; i < nconsts; i++) { + if (index_map[i] != -1) { + assert(index_map[i] == i); + index_map[n_used_consts++] = index_map[i]; } } - return 0; + if (n_used_consts == nconsts) { + /* nothing to do */ + err = 0; + goto end; + } + + /* move all used consts to the beginning of the consts list */ + assert(n_used_consts < nconsts); + for (Py_ssize_t i = 0; i < n_used_consts; i++) { + Py_ssize_t old_index = index_map[i]; + assert(i <= old_index && old_index < nconsts); + if (i != old_index) { + PyObject *value = PyList_GET_ITEM(consts, index_map[i]); + assert(value != NULL); + PyList_SetItem(consts, i, Py_NewRef(value)); + } + } + + /* truncate the consts list at its new size */ + if (PyList_SetSlice(consts, n_used_consts, nconsts, NULL) < 0) { + goto end; + } + + /* adjust const indices in the bytecode */ + reverse_index_map = PyMem_Malloc(nconsts * sizeof(Py_ssize_t)); + if (reverse_index_map == NULL) { + goto end; + } + for (Py_ssize_t i = 0; i < nconsts; i++) { + reverse_index_map[i] = -1; + } + for (Py_ssize_t i = 0; i < n_used_consts; i++) { + assert(index_map[i] != -1); + assert(reverse_index_map[index_map[i]] == -1); + reverse_index_map[index_map[i]] = i; + } + + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + for (int i = 0; i < b->b_iused; i++) { + if (b->b_instr[i].i_opcode == LOAD_CONST || + b->b_instr[i].i_opcode == KW_NAMES) { + + int index = b->b_instr[i].i_oparg; + assert(reverse_index_map[index] >= 0); + assert(reverse_index_map[index] < n_used_consts); + b->b_instr[i].i_oparg = (int)reverse_index_map[index]; + } + } + } + + err = 0; +end: + PyMem_Free(index_map); + PyMem_Free(reverse_index_map); + return err; } -static inline int +static inline bool is_exit_without_lineno(basicblock *b) { if (!basicblock_exits_scope(b)) { - return 0; + return false; } for (int i = 0; i < b->b_iused; i++) { if (b->b_instr[i].i_loc.lineno >= 0) { - return 0; + return false; } } - return 1; + return true; } /* PEP 626 mandates that the f_lineno of a frame is correct @@ -9855,6 +10055,9 @@ duplicate_exits_without_lineno(cfg_builder *g) /* Access to compiler optimizations for unit tests. + * + * _PyCompile_CodeGen takes and AST, applies code-gen and + * returns the unoptimized CFG as an instruction list. * * _PyCompile_OptimizeCfg takes an instruction list, constructs * a CFG, optimizes it and converts back to an instruction list. 
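[Note on the constant-compaction pass above] remove_unused_consts() replaces the old trim-trailing-consts pass with a full mark-and-compact: it records every constant referenced by LOAD_CONST or KW_NAMES in index_map (slot 0 is always kept for a possible docstring), moves the used constants to the front of the consts list, truncates the list, and rewrites each oparg through reverse_index_map. The same idea in miniature, on a plain int array instead of the CFG (a standalone sketch, not code from the patch):

    #include <stdio.h>

    /* Mark-and-compact illustration: "used" plays the role of the marking
       pass, "old_to_new" plays the role of reverse_index_map. */
    int main(void)
    {
        enum { NCONSTS = 6 };
        int used[NCONSTS] = {1, 0, 1, 0, 0, 1};   /* slot 0 is always marked */
        int old_to_new[NCONSTS];

        int next = 0;
        for (int i = 0; i < NCONSTS; i++) {
            old_to_new[i] = used[i] ? next++ : -1;
        }
        /* The consts list would be truncated to "next" (here 3) entries, and an
           instruction whose oparg was 5 would be rewritten to old_to_new[5] == 2. */
        for (int i = 0; i < NCONSTS; i++) {
            printf("const %d -> %d\n", i, old_to_new[i]);
        }
        return 0;
    }

Keeping slot 0 unconditionally matters because, as the comment above notes, a docstring can sit in the first constant slot without any instruction loading it.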
@@ -9918,7 +10121,8 @@ instructions_to_cfg(PyObject *instructions, cfg_builder *g) if (PyErr_Occurred()) { return -1; } - if (!cfg_builder_addop(g, opcode, oparg, loc)) { + if (cfg_builder_addop(g, opcode, oparg, loc, + UNUSED_OPARG, UNUSED_OPARG, UNUSED_OPARG) < 0) { return -1; } } @@ -9950,7 +10154,9 @@ cfg_to_instructions(cfg_builder *g) for (int i = 0; i < b->b_iused; i++) { struct instr *instr = &b->b_instr[i]; location loc = instr->i_loc; - int arg = HAS_TARGET(instr->i_opcode) ? instr->i_target->b_label : instr->i_oparg; + int arg = HAS_TARGET(instr->i_opcode) ? + instr->i_target->b_label : instr->i_oparg; + PyObject *inst_tuple = Py_BuildValue( "(iiiiii)", instr->i_opcode, arg, loc.lineno, loc.end_lineno, @@ -9973,6 +10179,52 @@ cfg_to_instructions(cfg_builder *g) return NULL; } +PyObject * +_PyCompile_CodeGen(PyObject *ast, PyObject *filename, PyCompilerFlags *pflags, + int optimize) +{ + PyObject *res = NULL; + + if (!PyAST_Check(ast)) { + PyErr_SetString(PyExc_TypeError, "expected an AST"); + return NULL; + } + + PyArena *arena = _PyArena_New(); + if (arena == NULL) { + return NULL; + } + + mod_ty mod = PyAST_obj2mod(ast, arena, 0 /* exec */); + if (mod == NULL || !_PyAST_Validate(mod)) { + _PyArena_Free(arena); + return NULL; + } + + struct compiler *c = new_compiler(mod, filename, pflags, optimize, arena); + if (c == NULL) { + _PyArena_Free(arena); + return NULL; + } + + if (compiler_codegen(c, mod) < 0) { + goto finally; + } + + cfg_builder *g = CFG_BUILDER(c); + + if (translate_jump_labels_to_targets(g->g_entryblock) < 0) { + goto finally; + } + + res = cfg_to_instructions(g); + +finally: + compiler_exit_scope(c); + compiler_free(c); + _PyArena_Free(arena); + return res; +} PyObject * _PyCompile_OptimizeCfg(PyObject *instructions, PyObject *consts) @@ -10012,6 +10264,5 @@ PyObject * PyCode_Optimize(PyObject *code, PyObject* Py_UNUSED(consts), PyObject *Py_UNUSED(names), PyObject *Py_UNUSED(lnotab_obj)) { - Py_INCREF(code); - return code; + return Py_NewRef(code); } diff --git a/Python/context.c b/Python/context.c index ef9db6a9cd063b..5d385508405ede 100644 --- a/Python/context.c +++ b/Python/context.c @@ -124,8 +124,7 @@ _PyContext_Enter(PyThreadState *ts, PyObject *octx) ctx->ctx_prev = (PyContext *)ts->context; /* borrow */ ctx->ctx_entered = 1; - Py_INCREF(ctx); - ts->context = (PyObject *)ctx; + ts->context = Py_NewRef(ctx); ts->context_ver++; return 0; @@ -400,8 +399,7 @@ context_new_from_vars(PyHamtObject *vars) return NULL; } - Py_INCREF(vars); - ctx->ctx_vars = vars; + ctx->ctx_vars = (PyHamtObject*)Py_NewRef(vars); _PyObject_GC_TRACK(ctx); return ctx; @@ -546,8 +544,7 @@ context_tp_subscript(PyContext *self, PyObject *key) PyErr_SetObject(PyExc_KeyError, key); return NULL; } - Py_INCREF(val); - return val; + return Py_NewRef(val); } static int @@ -588,11 +585,9 @@ _contextvars_Context_get_impl(PyContext *self, PyObject *key, return NULL; } if (found == 0) { - Py_INCREF(default_value); - return default_value; + return Py_NewRef(default_value); } - Py_INCREF(val); - return val; + return Py_NewRef(val); } @@ -831,11 +826,9 @@ contextvar_new(PyObject *name, PyObject *def) return NULL; } - Py_INCREF(name); - var->var_name = name; + var->var_name = Py_NewRef(name); - Py_XINCREF(def); - var->var_default = def; + var->var_default = Py_XNewRef(def); var->var_cached = NULL; var->var_cached_tsid = 0; @@ -1176,8 +1169,7 @@ token_tp_repr(PyContextToken *self) static PyObject * token_get_var(PyContextToken *self, void *Py_UNUSED(ignored)) { - Py_INCREF(self->tok_var); - return 
(PyObject *)self->tok_var; + return Py_NewRef(self->tok_var);; } static PyObject * @@ -1187,8 +1179,7 @@ token_get_old_value(PyContextToken *self, void *Py_UNUSED(ignored)) return get_token_missing(); } - Py_INCREF(self->tok_oldval); - return self->tok_oldval; + return Py_NewRef(self->tok_oldval); } static PyGetSetDef PyContextTokenType_getsetlist[] = { @@ -1228,14 +1219,11 @@ token_new(PyContext *ctx, PyContextVar *var, PyObject *val) return NULL; } - Py_INCREF(ctx); - tok->tok_ctx = ctx; + tok->tok_ctx = (PyContext*)Py_NewRef(ctx); - Py_INCREF(var); - tok->tok_var = var; + tok->tok_var = (PyContextVar*)Py_NewRef(var); - Py_XINCREF(val); - tok->tok_oldval = val; + tok->tok_oldval = Py_XNewRef(val); tok->tok_used = 0; @@ -1247,25 +1235,29 @@ token_new(PyContext *ctx, PyContextVar *var, PyObject *val) /////////////////////////// Token.MISSING -static PyObject *_token_missing; - - -typedef struct { - PyObject_HEAD -} PyContextTokenMissing; - - static PyObject * context_token_missing_tp_repr(PyObject *self) { return PyUnicode_FromString("<Token.MISSING>"); } +static void +context_token_missing_tp_dealloc(_PyContextTokenMissing *Py_UNUSED(self)) +{ +#ifdef Py_DEBUG + /* The singleton is statically allocated. */ + _Py_FatalRefcountError("deallocating the token missing singleton"); +#else + return; +#endif +} + PyTypeObject _PyContextTokenMissing_Type = { PyVarObject_HEAD_INIT(&PyType_Type, 0) "Token.MISSING", - sizeof(PyContextTokenMissing), + sizeof(_PyContextTokenMissing), + .tp_dealloc = (destructor)context_token_missing_tp_dealloc, .tp_getattro = PyObject_GenericGetAttr, .tp_flags = Py_TPFLAGS_DEFAULT, .tp_repr = context_token_missing_tp_repr, @@ -1275,19 +1267,7 @@ PyTypeObject _PyContextTokenMissing_Type = { static PyObject * get_token_missing(void) { - if (_token_missing != NULL) { - Py_INCREF(_token_missing); - return _token_missing; - } - - _token_missing = (PyObject *)PyObject_New( - PyContextTokenMissing, &_PyContextTokenMissing_Type); - if (_token_missing == NULL) { - return NULL; - } - - Py_INCREF(_token_missing); - return _token_missing; + return Py_NewRef(&_Py_SINGLETON(context_token_missing)); } @@ -1312,15 +1292,11 @@ _PyContext_ClearFreeList(PyInterpreterState *interp) void _PyContext_Fini(PyInterpreterState *interp) { - if (_Py_IsMainInterpreter(interp)) { - Py_CLEAR(_token_missing); - } _PyContext_ClearFreeList(interp); #if defined(Py_DEBUG) && PyContext_MAXFREELIST > 0 struct _Py_context_state *state = &interp->context; state->numfree = -1; #endif - _PyHamt_Fini(interp); } diff --git a/Python/dtoa.c b/Python/dtoa.c index 733e70bc791698..cff5f1b0658eae 100644 --- a/Python/dtoa.c +++ b/Python/dtoa.c @@ -119,6 +119,7 @@ #include "Python.h" #include "pycore_dtoa.h" // _PY_SHORT_FLOAT_REPR +#include "pycore_runtime.h" // _PyRuntime #include <stdlib.h> // exit() /* if _PY_SHORT_FLOAT_REPR == 0, then don't even try to compile @@ -156,7 +157,7 @@ #endif -typedef uint32_t ULong; +// ULong is defined in pycore_dtoa.h. typedef int32_t Long; typedef uint64_t ULLong; @@ -171,12 +172,6 @@ typedef uint64_t ULLong; #define Bug(x) {fprintf(stderr, "%s\n", x); exit(1);} #endif -#ifndef PRIVATE_MEM -#define PRIVATE_MEM 2304 -#endif -#define PRIVATE_mem ((PRIVATE_MEM+sizeof(double)-1)/sizeof(double)) -static double private_mem[PRIVATE_mem], *pmem_next = private_mem; - #ifdef __cplusplus extern "C" { #endif @@ -298,8 +293,6 @@ BCinfo { #define FFFFFFFF 0xffffffffUL -#define Kmax 7 - /* struct Bigint is used to represent arbitrary-precision integers. 
These integers are stored in sign-magnitude format, with the magnitude stored as an array of base 2**32 digits. Bigints are always normalized: if x is a @@ -322,13 +315,7 @@ BCinfo { significant (x[0]) to most significant (x[wds-1]). */ -struct -Bigint { - struct Bigint *next; - int k, maxwds, sign, wds; - ULong x[1]; -}; - +// struct Bigint is defined in pycore_dtoa.h. typedef struct Bigint Bigint; #ifndef Py_USING_MEMORY_DEBUGGER @@ -352,7 +339,9 @@ typedef struct Bigint Bigint; Bfree to PyMem_Free. Investigate whether this has any significant performance on impact. */ -static Bigint *freelist[Kmax+1]; +#define freelist _PyRuntime.dtoa.freelist +#define private_mem _PyRuntime.dtoa.preallocated +#define pmem_next _PyRuntime.dtoa.preallocated_next /* Allocate space for a Bigint with up to 1<<k digits */ @@ -363,13 +352,15 @@ Balloc(int k) Bigint *rv; unsigned int len; - if (k <= Kmax && (rv = freelist[k])) + if (k <= Bigint_Kmax && (rv = freelist[k])) freelist[k] = rv->next; else { x = 1 << k; len = (sizeof(Bigint) + (x-1)*sizeof(ULong) + sizeof(double) - 1) /sizeof(double); - if (k <= Kmax && pmem_next - private_mem + len <= (Py_ssize_t)PRIVATE_mem) { + if (k <= Bigint_Kmax && + pmem_next - private_mem + len <= (Py_ssize_t)Bigint_PREALLOC_SIZE + ) { rv = (Bigint*)pmem_next; pmem_next += len; } @@ -391,7 +382,7 @@ static void Bfree(Bigint *v) { if (v) { - if (v->k > Kmax) + if (v->k > Bigint_Kmax) FREE((void*)v); else { v->next = freelist[v->k]; @@ -400,6 +391,10 @@ Bfree(Bigint *v) } } +#undef pmem_next +#undef private_mem +#undef freelist + #else /* Alternative versions of Balloc and Bfree that use PyMem_Malloc and @@ -678,10 +673,6 @@ mult(Bigint *a, Bigint *b) #ifndef Py_USING_MEMORY_DEBUGGER -/* p5s is a linked list of powers of 5 of the form 5**(2**i), i >= 2 */ - -static Bigint *p5s; - /* multiply the Bigint b by 5**k. Returns a pointer to the result, or NULL on failure; if the returned pointer is distinct from b then the original Bigint b will have been Bfree'd. Ignores the sign of b. */ @@ -701,7 +692,7 @@ pow5mult(Bigint *b, int k) if (!(k >>= 2)) return b; - p5 = p5s; + p5 = _PyRuntime.dtoa.p5s; if (!p5) { /* first time */ p5 = i2b(625); @@ -709,7 +700,7 @@ pow5mult(Bigint *b, int k) Bfree(b); return NULL; } - p5s = p5; + _PyRuntime.dtoa.p5s = p5; p5->next = 0; } for(;;) { diff --git a/Python/errors.c b/Python/errors.c index fc3e4689209456..05ef62246ec0a4 100644 --- a/Python/errors.c +++ b/Python/errors.c @@ -37,8 +37,7 @@ _PyErr_Restore(PyThreadState *tstate, PyObject *type, PyObject *value, if (traceback != NULL && !PyTraceBack_Check(traceback)) { /* XXX Should never happen -- fatal error instead? */ /* Well, it could be None. */ - Py_DECREF(traceback); - traceback = NULL; + Py_SETREF(traceback, NULL); } /* Save these in locals to safeguard against recursive @@ -178,8 +177,7 @@ _PyErr_SetObject(PyThreadState *tstate, PyObject *exception, PyObject *value) } if (value != NULL && PyExceptionInstance_Check(value)) tb = PyException_GetTraceback(value); - Py_XINCREF(exception); - _PyErr_Restore(tstate, exception, value, tb); + _PyErr_Restore(tstate, Py_XNewRef(exception), value, tb); } void @@ -326,8 +324,7 @@ _PyErr_NormalizeException(PyThreadState *tstate, PyObject **exc, set to NULL. 
*/ if (!value) { - value = Py_None; - Py_INCREF(value); + value = Py_NewRef(Py_None); } /* Normalize the exception so that if the type is a class, the @@ -355,16 +352,13 @@ _PyErr_NormalizeException(PyThreadState *tstate, PyObject **exc, if (fixed_value == NULL) { goto error; } - Py_DECREF(value); - value = fixed_value; + Py_SETREF(value, fixed_value); } /* If the class of the instance doesn't exactly match the class of the type, believe the instance. */ else if (inclass != type) { - Py_INCREF(inclass); - Py_DECREF(type); - type = inclass; + Py_SETREF(type, Py_NewRef(inclass)); } } *exc = type; @@ -490,13 +484,9 @@ _PyErr_GetExcInfo(PyThreadState *tstate, { _PyErr_StackItem *exc_info = _PyErr_GetTopmostException(tstate); - *p_type = get_exc_type(exc_info->exc_value); - *p_value = exc_info->exc_value; - *p_traceback = get_exc_traceback(exc_info->exc_value); - - Py_XINCREF(*p_type); - Py_XINCREF(*p_value); - Py_XINCREF(*p_traceback); + *p_type = Py_XNewRef(get_exc_type(exc_info->exc_value)); + *p_value = Py_XNewRef(exc_info->exc_value); + *p_traceback = Py_XNewRef(get_exc_traceback(exc_info->exc_value)); } PyObject* @@ -675,9 +665,9 @@ _PyErr_FormatVFromCause(PyThreadState *tstate, PyObject *exception, _PyErr_Fetch(tstate, &exc, &val2, &tb); _PyErr_NormalizeException(tstate, &exc, &val2, &tb); - Py_INCREF(val); - PyException_SetCause(val2, val); - PyException_SetContext(val2, val); + PyException_SetCause(val2, Py_NewRef(val)); + PyException_SetContext(val2, Py_NewRef(val)); + Py_DECREF(val); _PyErr_Restore(tstate, exc, val2, tb); return NULL; @@ -1166,9 +1156,7 @@ PyErr_NewException(const char *name, PyObject *base, PyObject *dict) goto failure; } if (PyTuple_Check(base)) { - bases = base; - /* INCREF as we create a new ref in the else branch */ - Py_INCREF(bases); + bases = Py_NewRef(base); } else { bases = PyTuple_Pack(1, base); if (bases == NULL) @@ -1287,8 +1275,7 @@ make_unraisable_hook_args(PyThreadState *tstate, PyObject *exc_type, if (exc_type == NULL) { \ exc_type = Py_None; \ } \ - Py_INCREF(exc_type); \ - PyStructSequence_SET_ITEM(args, pos++, exc_type); \ + PyStructSequence_SET_ITEM(args, pos++, Py_NewRef(exc_type)); \ } while (0) diff --git a/Python/fileutils.c b/Python/fileutils.c index fb1e5ef9a03026..244bd899b3bd24 100644 --- a/Python/fileutils.c +++ b/Python/fileutils.c @@ -191,7 +191,7 @@ extern int _Py_normalize_encoding(const char *, char *, size_t); Py_DecodeLocale() uses mbstowcs() -1: unknown, need to call check_force_ascii() to get the value */ -static int force_ascii = -1; +#define force_ascii (_PyRuntime.fileutils.force_ascii) static int check_force_ascii(void) diff --git a/Python/frame.c b/Python/frame.c index 89f084b110cb5a..6e3c4eec95b7ff 100644 --- a/Python/frame.c +++ b/Python/frame.c @@ -69,7 +69,9 @@ void _PyFrame_Copy(_PyInterpreterFrame *src, _PyInterpreterFrame *dest) { assert(src->stacktop >= src->f_code->co_nlocalsplus); - Py_ssize_t size = ((char*)&src->localsplus[src->stacktop]) - (char *)src; + int nconsts = (int)PyTuple_Size(src->f_code->co_consts); + int nregisters = src->f_code->co_nlocalsplus + src->f_code->co_stacksize + nconsts; + Py_ssize_t size = ((char*)&src->localsplus[nregisters]) - (char *)src; memcpy(dest, src, size); // Don't leave a dangling pointer to the old frame when creating generators // and coroutines: @@ -80,6 +82,7 @@ _PyFrame_Copy(_PyInterpreterFrame *src, _PyInterpreterFrame *dest) static void take_ownership(PyFrameObject *f, _PyInterpreterFrame *frame) { + assert(frame->owner != FRAME_OWNED_BY_CSTACK); assert(frame->owner != 
FRAME_OWNED_BY_FRAME_OBJECT); assert(frame->owner != FRAME_CLEARED); Py_ssize_t size = ((char*)&frame->localsplus[frame->stacktop]) - (char *)frame; @@ -99,7 +102,9 @@ take_ownership(PyFrameObject *f, _PyInterpreterFrame *frame) while (prev && _PyFrame_IsIncomplete(prev)) { prev = prev->previous; } + frame->previous = NULL; if (prev) { + assert(prev->owner != FRAME_OWNED_BY_CSTACK); /* Link PyFrameObjects.f_back and remove link through _PyInterpreterFrame.previous */ PyFrameObject *back = _PyFrame_GetFrameObject(prev); if (back == NULL) { @@ -111,7 +116,6 @@ take_ownership(PyFrameObject *f, _PyInterpreterFrame *frame) else { f->f_back = (PyFrameObject *)Py_NewRef(back); } - frame->previous = NULL; } if (!_PyObject_GC_IS_TRACKED((PyObject *)f)) { _PyObject_GC_TRACK((PyObject *)f); @@ -125,6 +129,9 @@ _PyFrame_Clear(_PyInterpreterFrame *frame) * to have cleared the enclosing generator, if any. */ assert(frame->owner != FRAME_OWNED_BY_GENERATOR || _PyFrame_GetGenerator(frame)->gi_frame_state == FRAME_CLEARED); + // GH-99729: Clearing this frame can expose the stack (via finalizers). It's + // crucial that this frame has been unlinked, and is no longer visible: + assert(_PyThreadState_GET()->cframe->current_frame != frame); if (frame->frame_obj) { PyFrameObject *f = frame->frame_obj; frame->frame_obj = NULL; diff --git a/Python/future.c b/Python/future.c index 2a45d2ebeab9a1..d56f7330964684 100644 --- a/Python/future.c +++ b/Python/future.c @@ -96,22 +96,14 @@ future_parse(PyFutureFeatures *ff, mod_ty mod, PyObject *filename) } -PyFutureFeatures * -_PyFuture_FromAST(mod_ty mod, PyObject *filename) +int +_PyFuture_FromAST(mod_ty mod, PyObject *filename, PyFutureFeatures *ff) { - PyFutureFeatures *ff; - - ff = (PyFutureFeatures *)PyObject_Malloc(sizeof(PyFutureFeatures)); - if (ff == NULL) { - PyErr_NoMemory(); - return NULL; - } ff->ff_features = 0; ff->ff_location = (_PyCompilerSrcLocation){-1, -1, -1, -1}; if (!future_parse(ff, mod, filename)) { - PyObject_Free(ff); - return NULL; + return 0; } - return ff; + return 1; } diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h new file mode 100644 index 00000000000000..e897aff433f1ab --- /dev/null +++ b/Python/generated_cases.c.h @@ -0,0 +1,3918 @@ +// This file is generated by Tools/cases_generator/generate_cases.py +// from Python/bytecodes.c +// Do not edit! + + TARGET(NOP) { + DISPATCH(); + } + + TARGET(RESUME) { + assert(tstate->cframe == &cframe); + assert(frame == cframe.current_frame); + if (_Py_atomic_load_relaxed_int32(eval_breaker) && oparg < 2) { + goto handle_eval_breaker; + } + DISPATCH(); + } + + TARGET(LOAD_CLOSURE) { + PyObject *value; + /* We keep LOAD_CLOSURE so that the bytecode stays more readable. 
*/ + value = GETLOCAL(oparg); + if (value == NULL) goto unbound_local_error; + Py_INCREF(value); + STACK_GROW(1); + POKE(1, value); + DISPATCH(); + } + + TARGET(LOAD_FAST_CHECK) { + PyObject *value; + value = GETLOCAL(oparg); + if (value == NULL) goto unbound_local_error; + Py_INCREF(value); + STACK_GROW(1); + POKE(1, value); + DISPATCH(); + } + + TARGET(LOAD_FAST) { + PyObject *value; + value = GETLOCAL(oparg); + assert(value != NULL); + Py_INCREF(value); + STACK_GROW(1); + POKE(1, value); + DISPATCH(); + } + + TARGET(LOAD_FAST_R) { + PyObject *value; + value = GETLOCAL(oparg); + assert(value != NULL); + Py_INCREF(value); + STACK_GROW(1); + POKE(1, value); + DISPATCH(); + } + + TARGET(LOAD_CONST) { + PREDICTED(LOAD_CONST); + PyObject *value; + value = GETITEM(consts, oparg); + Py_INCREF(value); + STACK_GROW(1); + POKE(1, value); + DISPATCH(); + } + + TARGET(STORE_FAST) { + PyObject *value = PEEK(1); + SETLOCAL(oparg, value); + STACK_SHRINK(1); + DISPATCH(); + } + + TARGET(STORE_FAST_R) { + PyObject *value = PEEK(1); + SETLOCAL(oparg, value); + STACK_SHRINK(1); + DISPATCH(); + } + + TARGET(LOAD_FAST__LOAD_FAST) { + PyObject *_tmp_1; + PyObject *_tmp_2; + { + PyObject *value; + value = GETLOCAL(oparg); + assert(value != NULL); + Py_INCREF(value); + _tmp_2 = value; + } + NEXTOPARG(); + JUMPBY(OPSIZE); + { + PyObject *value; + value = GETLOCAL(oparg); + assert(value != NULL); + Py_INCREF(value); + _tmp_1 = value; + } + STACK_GROW(2); + POKE(1, _tmp_1); + POKE(2, _tmp_2); + DISPATCH(); + } + + TARGET(LOAD_FAST__LOAD_CONST) { + PyObject *_tmp_1; + PyObject *_tmp_2; + { + PyObject *value; + value = GETLOCAL(oparg); + assert(value != NULL); + Py_INCREF(value); + _tmp_2 = value; + } + NEXTOPARG(); + JUMPBY(OPSIZE); + { + PyObject *value; + value = GETITEM(consts, oparg); + Py_INCREF(value); + _tmp_1 = value; + } + STACK_GROW(2); + POKE(1, _tmp_1); + POKE(2, _tmp_2); + DISPATCH(); + } + + TARGET(STORE_FAST__LOAD_FAST) { + PyObject *_tmp_1 = PEEK(1); + { + PyObject *value = _tmp_1; + SETLOCAL(oparg, value); + } + NEXTOPARG(); + JUMPBY(OPSIZE); + { + PyObject *value; + value = GETLOCAL(oparg); + assert(value != NULL); + Py_INCREF(value); + _tmp_1 = value; + } + POKE(1, _tmp_1); + DISPATCH(); + } + + TARGET(STORE_FAST__STORE_FAST) { + PyObject *_tmp_1 = PEEK(1); + PyObject *_tmp_2 = PEEK(2); + { + PyObject *value = _tmp_1; + SETLOCAL(oparg, value); + } + NEXTOPARG(); + JUMPBY(OPSIZE); + { + PyObject *value = _tmp_2; + SETLOCAL(oparg, value); + } + STACK_SHRINK(2); + DISPATCH(); + } + + TARGET(LOAD_CONST__LOAD_FAST) { + PyObject *_tmp_1; + PyObject *_tmp_2; + { + PyObject *value; + value = GETITEM(consts, oparg); + Py_INCREF(value); + _tmp_2 = value; + } + NEXTOPARG(); + JUMPBY(OPSIZE); + { + PyObject *value; + value = GETLOCAL(oparg); + assert(value != NULL); + Py_INCREF(value); + _tmp_1 = value; + } + STACK_GROW(2); + POKE(1, _tmp_1); + POKE(2, _tmp_2); + DISPATCH(); + } + + TARGET(POP_TOP) { + PyObject *value = PEEK(1); + Py_DECREF(value); + STACK_SHRINK(1); + DISPATCH(); + } + + TARGET(PUSH_NULL) { + PyObject *res; + res = NULL; + STACK_GROW(1); + POKE(1, res); + DISPATCH(); + } + + TARGET(END_FOR) { + PyObject *_tmp_1 = PEEK(1); + PyObject *_tmp_2 = PEEK(2); + { + PyObject *value = _tmp_1; + Py_DECREF(value); + } + { + PyObject *value = _tmp_2; + Py_DECREF(value); + } + STACK_SHRINK(2); + DISPATCH(); + } + + TARGET(UNARY_POSITIVE) { + PyObject *value = PEEK(1); + PyObject *res; + res = PyNumber_Positive(value); + Py_DECREF(value); + if (res == NULL) goto pop_1_error; + POKE(1, res); + DISPATCH(); 
+ } + + TARGET(UNARY_POSITIVE_R) { + PyObject *value = REG(oparg1); + PyObject *res; + assert(value != NULL); + res = PyNumber_Positive(value); + if (res == NULL) goto error; + Py_XSETREF(REG(oparg2), res); + DISPATCH(); + } + + TARGET(UNARY_NEGATIVE) { + PyObject *value = PEEK(1); + PyObject *res; + res = PyNumber_Negative(value); + Py_DECREF(value); + if (res == NULL) goto pop_1_error; + POKE(1, res); + DISPATCH(); + } + + TARGET(UNARY_NEGATIVE_R) { + PyObject *value = REG(oparg1); + assert(value != NULL); + PyObject *res = PyNumber_Negative(value); + Py_XSETREF(REG(oparg2), res); + if (res == NULL) goto error; + DISPATCH(); + } + + TARGET(UNARY_NOT) { + PyObject *value = PEEK(1); + PyObject *res; + int err = PyObject_IsTrue(value); + Py_DECREF(value); + if (err < 0) goto pop_1_error; + if (err == 0) { + res = Py_True; + } + else { + res = Py_False; + } + Py_INCREF(res); + POKE(1, res); + DISPATCH(); + } + + TARGET(UNARY_NOT_R) { + PyObject *value = REG(oparg1); + assert(value != NULL); + int err = PyObject_IsTrue(value); + if (err < 0) goto error; + PyObject *res; + if (err == 0) { + res = Py_True; + } + else { + res = Py_False; + } + Py_INCREF(res); + Py_XSETREF(REG(oparg2), res); + DISPATCH(); + } + + TARGET(UNARY_INVERT) { + PyObject *value = PEEK(1); + PyObject *res; + res = PyNumber_Invert(value); + Py_DECREF(value); + if (res == NULL) goto pop_1_error; + POKE(1, res); + DISPATCH(); + } + + TARGET(UNARY_INVERT_R) { + PyObject *value = REG(oparg1); + assert(value != NULL); + PyObject *res = PyNumber_Invert(value); + if (res == NULL) goto error; + Py_XSETREF(REG(oparg2), res); + DISPATCH(); + } + + TARGET(BINARY_OP_MULTIPLY_INT) { + PyObject *right = PEEK(1); + PyObject *left = PEEK(2); + PyObject *prod; + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); + DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); + STAT_INC(BINARY_OP, hit); + prod = _PyLong_Multiply((PyLongObject *)left, (PyLongObject *)right); + _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); + _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); + if (prod == NULL) goto pop_2_error; + STACK_SHRINK(1); + POKE(1, prod); + JUMPBY(1); + DISPATCH(); + } + + TARGET(BINARY_OP_MULTIPLY_FLOAT) { + PyObject *right = PEEK(1); + PyObject *left = PEEK(2); + PyObject *prod; + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); + DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); + STAT_INC(BINARY_OP, hit); + double dprod = ((PyFloatObject *)left)->ob_fval * + ((PyFloatObject *)right)->ob_fval; + prod = PyFloat_FromDouble(dprod); + _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); + _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); + if (prod == NULL) goto pop_2_error; + STACK_SHRINK(1); + POKE(1, prod); + JUMPBY(1); + DISPATCH(); + } + + TARGET(BINARY_OP_SUBTRACT_INT) { + PyObject *right = PEEK(1); + PyObject *left = PEEK(2); + PyObject *sub; + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); + DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); + STAT_INC(BINARY_OP, hit); + sub = _PyLong_Subtract((PyLongObject *)left, (PyLongObject *)right); + _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); + _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); + if (sub == NULL) goto pop_2_error; + STACK_SHRINK(1); + POKE(1, sub); + JUMPBY(1); + DISPATCH(); + } + + TARGET(BINARY_OP_SUBTRACT_FLOAT) { + PyObject *right = PEEK(1); + PyObject *left = PEEK(2); + PyObject *sub; + assert(cframe.use_tracing == 0); + 
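        /* Guard pattern used by every specialized case: each DEOPT_IF below
           re-checks an assumption that was baked in at specialization time
           (here, that both operands are exact floats).  If a guard fails,
           execution falls back to the generic BINARY_OP, whose adaptive
           counter can later trigger re-specialization. */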
DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); + DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); + STAT_INC(BINARY_OP, hit); + double dsub = ((PyFloatObject *)left)->ob_fval - ((PyFloatObject *)right)->ob_fval; + sub = PyFloat_FromDouble(dsub); + _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); + _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); + if (sub == NULL) goto pop_2_error; + STACK_SHRINK(1); + POKE(1, sub); + JUMPBY(1); + DISPATCH(); + } + + TARGET(BINARY_OP_ADD_UNICODE) { + PyObject *right = PEEK(1); + PyObject *left = PEEK(2); + PyObject *res; + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); + DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); + STAT_INC(BINARY_OP, hit); + res = PyUnicode_Concat(left, right); + _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc); + _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); + if (res == NULL) goto pop_2_error; + STACK_SHRINK(1); + POKE(1, res); + JUMPBY(1); + DISPATCH(); + } + + TARGET(BINARY_OP_INPLACE_ADD_UNICODE) { + PyObject *right = PEEK(1); + PyObject *left = PEEK(2); + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); + DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); + _Py_CODEUNIT true_next = next_instr[INLINE_CACHE_ENTRIES_BINARY_OP]; + assert(_Py_OPCODE(true_next) == STORE_FAST || + _Py_OPCODE(true_next) == STORE_FAST__LOAD_FAST); + PyObject **target_local = &GETLOCAL(_Py_OPARG(true_next)); + DEOPT_IF(*target_local != left, BINARY_OP); + STAT_INC(BINARY_OP, hit); + /* Handle `left = left + right` or `left += right` for str. + * + * When possible, extend `left` in place rather than + * allocating a new PyUnicodeObject. This attempts to avoid + * quadratic behavior when one neglects to use str.join(). + * + * If `left` has only two references remaining (one from + * the stack, one in the locals), DECREFing `left` leaves + * only the locals reference, so PyUnicode_Append knows + * that the string is safe to mutate. + */ + assert(Py_REFCNT(left) >= 2); + _Py_DECREF_NO_DEALLOC(left); + PyUnicode_Append(target_local, right); + _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); + if (*target_local == NULL) goto pop_2_error; + // The STORE_FAST is already done. 
+ JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP + 1); + STACK_SHRINK(2); + DISPATCH(); + } + + TARGET(BINARY_OP_ADD_FLOAT) { + PyObject *right = PEEK(1); + PyObject *left = PEEK(2); + PyObject *sum; + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); + DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); + STAT_INC(BINARY_OP, hit); + double dsum = ((PyFloatObject *)left)->ob_fval + + ((PyFloatObject *)right)->ob_fval; + sum = PyFloat_FromDouble(dsum); + _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); + _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); + if (sum == NULL) goto pop_2_error; + STACK_SHRINK(1); + POKE(1, sum); + JUMPBY(1); + DISPATCH(); + } + + TARGET(BINARY_OP_ADD_INT) { + PyObject *right = PEEK(1); + PyObject *left = PEEK(2); + PyObject *sum; + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); + DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); + STAT_INC(BINARY_OP, hit); + sum = _PyLong_Add((PyLongObject *)left, (PyLongObject *)right); + _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); + _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); + if (sum == NULL) goto pop_2_error; + STACK_SHRINK(1); + POKE(1, sum); + JUMPBY(1); + DISPATCH(); + } + + TARGET(BINARY_SUBSCR) { + PREDICTED(BINARY_SUBSCR); + static_assert(INLINE_CACHE_ENTRIES_BINARY_SUBSCR == 4, "incorrect cache size"); + PyObject *sub = PEEK(1); + PyObject *container = PEEK(2); + PyObject *res; + _PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + next_instr -= OPSIZE; + _Py_Specialize_BinarySubscr(container, sub, next_instr); + DISPATCH_SAME_OPARG(); + } + STAT_INC(BINARY_SUBSCR, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + res = PyObject_GetItem(container, sub); + Py_DECREF(container); + Py_DECREF(sub); + if (res == NULL) goto pop_2_error; + STACK_SHRINK(1); + POKE(1, res); + JUMPBY(4); + DISPATCH(); + } + + TARGET(BINARY_SLICE) { + PyObject *stop = PEEK(1); + PyObject *start = PEEK(2); + PyObject *container = PEEK(3); + PyObject *res; + PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); + // Can't use ERROR_IF() here, because we haven't + // DECREF'ed container yet, and we still own slice. 
+ if (slice == NULL) { + res = NULL; + } + else { + res = PyObject_GetItem(container, slice); + Py_DECREF(slice); + } + Py_DECREF(container); + if (res == NULL) goto pop_3_error; + STACK_SHRINK(2); + POKE(1, res); + DISPATCH(); + } + + TARGET(STORE_SLICE) { + PyObject *stop = PEEK(1); + PyObject *start = PEEK(2); + PyObject *container = PEEK(3); + PyObject *v = PEEK(4); + PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); + int err; + if (slice == NULL) { + err = 1; + } + else { + err = PyObject_SetItem(container, slice, v); + Py_DECREF(slice); + } + Py_DECREF(v); + Py_DECREF(container); + if (err) goto pop_4_error; + STACK_SHRINK(4); + DISPATCH(); + } + + TARGET(BINARY_SUBSCR_LIST_INT) { + PyObject *sub = PEEK(1); + PyObject *list = PEEK(2); + PyObject *res; + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); + DEOPT_IF(!PyList_CheckExact(list), BINARY_SUBSCR); + + // Deopt unless 0 <= sub < PyList_Size(list) + Py_ssize_t signed_magnitude = Py_SIZE(sub); + DEOPT_IF(((size_t)signed_magnitude) > 1, BINARY_SUBSCR); + assert(((PyLongObject *)_PyLong_GetZero())->ob_digit[0] == 0); + Py_ssize_t index = ((PyLongObject*)sub)->ob_digit[0]; + DEOPT_IF(index >= PyList_GET_SIZE(list), BINARY_SUBSCR); + STAT_INC(BINARY_SUBSCR, hit); + res = PyList_GET_ITEM(list, index); + assert(res != NULL); + Py_INCREF(res); + _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); + Py_DECREF(list); + STACK_SHRINK(1); + POKE(1, res); + JUMPBY(4); + DISPATCH(); + } + + TARGET(BINARY_SUBSCR_TUPLE_INT) { + PyObject *sub = PEEK(1); + PyObject *tuple = PEEK(2); + PyObject *res; + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); + DEOPT_IF(!PyTuple_CheckExact(tuple), BINARY_SUBSCR); + + // Deopt unless 0 <= sub < PyTuple_Size(list) + Py_ssize_t signed_magnitude = Py_SIZE(sub); + DEOPT_IF(((size_t)signed_magnitude) > 1, BINARY_SUBSCR); + assert(((PyLongObject *)_PyLong_GetZero())->ob_digit[0] == 0); + Py_ssize_t index = ((PyLongObject*)sub)->ob_digit[0]; + DEOPT_IF(index >= PyTuple_GET_SIZE(tuple), BINARY_SUBSCR); + STAT_INC(BINARY_SUBSCR, hit); + res = PyTuple_GET_ITEM(tuple, index); + assert(res != NULL); + Py_INCREF(res); + _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); + Py_DECREF(tuple); + STACK_SHRINK(1); + POKE(1, res); + JUMPBY(4); + DISPATCH(); + } + + TARGET(BINARY_SUBSCR_DICT) { + PyObject *sub = PEEK(1); + PyObject *dict = PEEK(2); + PyObject *res; + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyDict_CheckExact(dict), BINARY_SUBSCR); + STAT_INC(BINARY_SUBSCR, hit); + res = PyDict_GetItemWithError(dict, sub); + if (res == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_SetKeyError(sub); + } + Py_DECREF(dict); + Py_DECREF(sub); + if (true) goto pop_2_error; + } + Py_INCREF(res); // Do this before DECREF'ing dict, sub + Py_DECREF(dict); + Py_DECREF(sub); + STACK_SHRINK(1); + POKE(1, res); + JUMPBY(4); + DISPATCH(); + } + + TARGET(BINARY_SUBSCR_GETITEM) { + PyObject *sub = PEEK(1); + PyObject *container = PEEK(2); + uint32_t type_version = read_u32(&next_instr[1].cache); + uint16_t func_version = read_u16(&next_instr[3].cache); + PyTypeObject *tp = Py_TYPE(container); + DEOPT_IF(tp->tp_version_tag != type_version, BINARY_SUBSCR); + assert(tp->tp_flags & Py_TPFLAGS_HEAPTYPE); + PyObject *cached = ((PyHeapTypeObject *)tp)->_spec_cache.getitem; + assert(PyFunction_Check(cached)); + PyFunctionObject *getitem = (PyFunctionObject *)cached; + DEOPT_IF(getitem->func_version != func_version, BINARY_SUBSCR); + PyCodeObject *code = 
(PyCodeObject *)getitem->func_code; + assert(code->co_argcount == 2); + DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), BINARY_SUBSCR); + STAT_INC(BINARY_SUBSCR, hit); + Py_INCREF(getitem); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, getitem); + STACK_SHRINK(2); + new_frame->localsplus[0] = container; + new_frame->localsplus[1] = sub; + for (int i = 2; i < code->co_nlocalsplus; i++) { + new_frame->localsplus[i] = NULL; + } + JUMPBY(INLINE_CACHE_ENTRIES_BINARY_SUBSCR); + DISPATCH_INLINED(new_frame); + } + + TARGET(LIST_APPEND) { + PyObject *v = PEEK(1); + PyObject *list = PEEK(oparg + 1); // +1 to account for v staying on stack + if (_PyList_AppendTakeRef((PyListObject *)list, v) < 0) goto pop_1_error; + STACK_SHRINK(1); + PREDICT(JUMP_BACKWARD); + DISPATCH(); + } + + TARGET(SET_ADD) { + PyObject *v = PEEK(1); + PyObject *set = PEEK(oparg + 1); // +1 to account for v staying on stack + int err = PySet_Add(set, v); + Py_DECREF(v); + if (err) goto pop_1_error; + STACK_SHRINK(1); + PREDICT(JUMP_BACKWARD); + DISPATCH(); + } + + TARGET(STORE_SUBSCR) { + PREDICTED(STORE_SUBSCR); + PyObject *sub = PEEK(1); + PyObject *container = PEEK(2); + PyObject *v = PEEK(3); + uint16_t counter = read_u16(&next_instr[0].cache); + if (ADAPTIVE_COUNTER_IS_ZERO(counter)) { + assert(cframe.use_tracing == 0); + next_instr -= OPSIZE; + _Py_Specialize_StoreSubscr(container, sub, next_instr); + DISPATCH_SAME_OPARG(); + } + STAT_INC(STORE_SUBSCR, deferred); + _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr; + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + /* container[sub] = v */ + int err = PyObject_SetItem(container, sub, v); + Py_DECREF(v); + Py_DECREF(container); + Py_DECREF(sub); + if (err) goto pop_3_error; + STACK_SHRINK(3); + JUMPBY(1); + DISPATCH(); + } + + TARGET(STORE_SUBSCR_LIST_INT) { + PyObject *sub = PEEK(1); + PyObject *list = PEEK(2); + PyObject *value = PEEK(3); + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(sub), STORE_SUBSCR); + DEOPT_IF(!PyList_CheckExact(list), STORE_SUBSCR); + + // Ensure nonnegative, zero-or-one-digit ints. + DEOPT_IF(((size_t)Py_SIZE(sub)) > 1, STORE_SUBSCR); + Py_ssize_t index = ((PyLongObject*)sub)->ob_digit[0]; + // Ensure index < len(list) + DEOPT_IF(index >= PyList_GET_SIZE(list), STORE_SUBSCR); + STAT_INC(STORE_SUBSCR, hit); + + PyObject *old_value = PyList_GET_ITEM(list, index); + PyList_SET_ITEM(list, index, value); + assert(old_value != NULL); + Py_DECREF(old_value); + _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); + Py_DECREF(list); + STACK_SHRINK(3); + JUMPBY(1); + DISPATCH(); + } + + TARGET(STORE_SUBSCR_DICT) { + PyObject *sub = PEEK(1); + PyObject *dict = PEEK(2); + PyObject *value = PEEK(3); + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyDict_CheckExact(dict), STORE_SUBSCR); + STAT_INC(STORE_SUBSCR, hit); + int err = _PyDict_SetItem_Take2((PyDictObject *)dict, sub, value); + Py_DECREF(dict); + if (err) goto pop_3_error; + STACK_SHRINK(3); + JUMPBY(1); + DISPATCH(); + } + + TARGET(DELETE_SUBSCR) { + PyObject *sub = PEEK(1); + PyObject *container = PEEK(2); + /* del container[sub] */ + int err = PyObject_DelItem(container, sub); + Py_DECREF(container); + Py_DECREF(sub); + if (err) goto pop_2_error; + STACK_SHRINK(2); + DISPATCH(); + } + + TARGET(PRINT_EXPR) { + PyObject *value = PEEK(1); + PyObject *hook = _PySys_GetAttr(tstate, &_Py_ID(displayhook)); + PyObject *res; + // Can't use ERROR_IF here. 
+ if (hook == NULL) { + _PyErr_SetString(tstate, PyExc_RuntimeError, + "lost sys.displayhook"); + Py_DECREF(value); + if (true) goto pop_1_error; + } + res = PyObject_CallOneArg(hook, value); + Py_DECREF(value); + if (res == NULL) goto pop_1_error; + Py_DECREF(res); + STACK_SHRINK(1); + DISPATCH(); + } + + TARGET(RAISE_VARARGS) { + PyObject *cause = NULL, *exc = NULL; + switch (oparg) { + case 2: + cause = POP(); /* cause */ + /* fall through */ + case 1: + exc = POP(); /* exc */ + /* fall through */ + case 0: + if (do_raise(tstate, exc, cause)) { + goto exception_unwind; + } + break; + default: + _PyErr_SetString(tstate, PyExc_SystemError, + "bad RAISE_VARARGS oparg"); + break; + } + goto error; + } + + TARGET(INTERPRETER_EXIT) { + PyObject *retval = PEEK(1); + assert(frame == &entry_frame); + assert(_PyFrame_IsIncomplete(frame)); + STACK_SHRINK(1); // Since we're not going to DISPATCH() + assert(EMPTY()); + /* Restore previous cframe and return. */ + tstate->cframe = cframe.previous; + tstate->cframe->use_tracing = cframe.use_tracing; + assert(tstate->cframe->current_frame == frame->previous); + assert(!_PyErr_Occurred(tstate)); + _Py_LeaveRecursiveCallTstate(tstate); + return retval; + } + + TARGET(RETURN_VALUE) { + PyObject *retval = PEEK(1); + STACK_SHRINK(1); + assert(EMPTY()); + _PyFrame_SetStackPointer(frame, stack_pointer); + TRACE_FUNCTION_EXIT(); + DTRACE_FUNCTION_EXIT(); + _Py_LeaveRecursiveCallPy(tstate); + assert(frame != &entry_frame); + // GH-99729: We need to unlink the frame *before* clearing it: + _PyInterpreterFrame *dying = frame; + frame = cframe.current_frame = dying->previous; + _PyEvalFrameClearAndPop(tstate, dying); + _PyFrame_StackPush(frame, retval); + goto resume_frame; + } + + TARGET(GET_AITER) { + PyObject *obj = PEEK(1); + PyObject *iter; + unaryfunc getter = NULL; + PyTypeObject *type = Py_TYPE(obj); + + if (type->tp_as_async != NULL) { + getter = type->tp_as_async->am_aiter; + } + + if (getter == NULL) { + _PyErr_Format(tstate, PyExc_TypeError, + "'async for' requires an object with " + "__aiter__ method, got %.100s", + type->tp_name); + Py_DECREF(obj); + if (true) goto pop_1_error; + } + + iter = (*getter)(obj); + Py_DECREF(obj); + if (iter == NULL) goto pop_1_error; + + if (Py_TYPE(iter)->tp_as_async == NULL || + Py_TYPE(iter)->tp_as_async->am_anext == NULL) { + + _PyErr_Format(tstate, PyExc_TypeError, + "'async for' received an object from __aiter__ " + "that does not implement __anext__: %.100s", + Py_TYPE(iter)->tp_name); + Py_DECREF(iter); + if (true) goto pop_1_error; + } + POKE(1, iter); + DISPATCH(); + } + + TARGET(GET_ANEXT) { + unaryfunc getter = NULL; + PyObject *next_iter = NULL; + PyObject *awaitable = NULL; + PyObject *aiter = TOP(); + PyTypeObject *type = Py_TYPE(aiter); + + if (PyAsyncGen_CheckExact(aiter)) { + awaitable = type->tp_as_async->am_anext(aiter); + if (awaitable == NULL) { + goto error; + } + } else { + if (type->tp_as_async != NULL){ + getter = type->tp_as_async->am_anext; + } + + if (getter != NULL) { + next_iter = (*getter)(aiter); + if (next_iter == NULL) { + goto error; + } + } + else { + _PyErr_Format(tstate, PyExc_TypeError, + "'async for' requires an iterator with " + "__anext__ method, got %.100s", + type->tp_name); + goto error; + } + + awaitable = _PyCoro_GetAwaitableIter(next_iter); + if (awaitable == NULL) { + _PyErr_FormatFromCause( + PyExc_TypeError, + "'async for' received an invalid object " + "from __anext__: %.100s", + Py_TYPE(next_iter)->tp_name); + + Py_DECREF(next_iter); + goto error; + } else { + 
Py_DECREF(next_iter); + } + } + + PUSH(awaitable); + PREDICT(LOAD_CONST); + DISPATCH(); + } + + TARGET(GET_AWAITABLE) { + PREDICTED(GET_AWAITABLE); + PyObject *iterable = TOP(); + PyObject *iter = _PyCoro_GetAwaitableIter(iterable); + + if (iter == NULL) { + format_awaitable_error(tstate, Py_TYPE(iterable), oparg); + } + + Py_DECREF(iterable); + + if (iter != NULL && PyCoro_CheckExact(iter)) { + PyObject *yf = _PyGen_yf((PyGenObject*)iter); + if (yf != NULL) { + /* `iter` is a coroutine object that is being + awaited, `yf` is a pointer to the current awaitable + being awaited on. */ + Py_DECREF(yf); + Py_CLEAR(iter); + _PyErr_SetString(tstate, PyExc_RuntimeError, + "coroutine is being awaited already"); + /* The code below jumps to `error` if `iter` is NULL. */ + } + } + + SET_TOP(iter); /* Even if it's NULL */ + + if (iter == NULL) { + goto error; + } + + PREDICT(LOAD_CONST); + DISPATCH(); + } + + TARGET(SEND) { + assert(frame != &entry_frame); + assert(STACK_LEVEL() >= 2); + PyObject *v = POP(); + PyObject *receiver = TOP(); + PySendResult gen_status; + PyObject *retval; + if (tstate->c_tracefunc == NULL) { + gen_status = PyIter_Send(receiver, v, &retval); + } else { + if (Py_IsNone(v) && PyIter_Check(receiver)) { + retval = Py_TYPE(receiver)->tp_iternext(receiver); + } + else { + retval = PyObject_CallMethodOneArg(receiver, &_Py_ID(send), v); + } + if (retval == NULL) { + if (tstate->c_tracefunc != NULL + && _PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) + call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj, tstate, frame); + if (_PyGen_FetchStopIterationValue(&retval) == 0) { + gen_status = PYGEN_RETURN; + } + else { + gen_status = PYGEN_ERROR; + } + } + else { + gen_status = PYGEN_NEXT; + } + } + Py_DECREF(v); + if (gen_status == PYGEN_ERROR) { + assert(retval == NULL); + goto error; + } + if (gen_status == PYGEN_RETURN) { + assert(retval != NULL); + Py_DECREF(receiver); + SET_TOP(retval); + JUMPBY(oparg); + } + else { + assert(gen_status == PYGEN_NEXT); + assert(retval != NULL); + PUSH(retval); + } + DISPATCH(); + } + + TARGET(ASYNC_GEN_WRAP) { + PyObject *v = TOP(); + assert(frame->f_code->co_flags & CO_ASYNC_GENERATOR); + PyObject *w = _PyAsyncGenValueWrapperNew(v); + if (w == NULL) { + goto error; + } + SET_TOP(w); + Py_DECREF(v); + DISPATCH(); + } + + TARGET(YIELD_VALUE) { + // NOTE: It's important that YIELD_VALUE never raises an exception! + // The compiler treats any exception raised here as a failed close() + // or throw() call. 
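        /* In outline: mark the generator's frame as suspended, unlink it
           from the frame stack, make the calling frame current again, and
           push the yielded value onto the caller's stack -- undoing the
           bookkeeping that was done when the generator frame was entered. */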
+ assert(oparg == STACK_LEVEL()); + assert(frame != &entry_frame); + PyObject *retval = POP(); + PyGenObject *gen = _PyFrame_GetGenerator(frame); + gen->gi_frame_state = FRAME_SUSPENDED; + _PyFrame_SetStackPointer(frame, stack_pointer); + TRACE_FUNCTION_EXIT(); + DTRACE_FUNCTION_EXIT(); + tstate->exc_info = gen->gi_exc_state.previous_item; + gen->gi_exc_state.previous_item = NULL; + _Py_LeaveRecursiveCallPy(tstate); + _PyInterpreterFrame *gen_frame = frame; + frame = cframe.current_frame = frame->previous; + gen_frame->previous = NULL; + frame->prev_instr -= frame->yield_offset; + _PyFrame_StackPush(frame, retval); + goto resume_frame; + } + + TARGET(POP_EXCEPT) { + _PyErr_StackItem *exc_info = tstate->exc_info; + PyObject *value = exc_info->exc_value; + exc_info->exc_value = POP(); + Py_XDECREF(value); + DISPATCH(); + } + + TARGET(RERAISE) { + if (oparg) { + PyObject *lasti = PEEK(oparg + 1); + if (PyLong_Check(lasti)) { + frame->prev_instr = _PyCode_CODE(frame->f_code) + PyLong_AsLong(lasti); + assert(!_PyErr_Occurred(tstate)); + } + else { + assert(PyLong_Check(lasti)); + _PyErr_SetString(tstate, PyExc_SystemError, "lasti is not an int"); + goto error; + } + } + PyObject *val = POP(); + assert(val && PyExceptionInstance_Check(val)); + PyObject *exc = Py_NewRef(PyExceptionInstance_Class(val)); + PyObject *tb = PyException_GetTraceback(val); + _PyErr_Restore(tstate, exc, val, tb); + goto exception_unwind; + } + + TARGET(PREP_RERAISE_STAR) { + PyObject *excs = POP(); + assert(PyList_Check(excs)); + PyObject *orig = POP(); + + PyObject *val = _PyExc_PrepReraiseStar(orig, excs); + Py_DECREF(excs); + Py_DECREF(orig); + + if (val == NULL) { + goto error; + } + + PUSH(val); + DISPATCH(); + } + + TARGET(END_ASYNC_FOR) { + PyObject *val = POP(); + assert(val && PyExceptionInstance_Check(val)); + if (PyErr_GivenExceptionMatches(val, PyExc_StopAsyncIteration)) { + Py_DECREF(val); + Py_DECREF(POP()); + } + else { + PyObject *exc = Py_NewRef(PyExceptionInstance_Class(val)); + PyObject *tb = PyException_GetTraceback(val); + _PyErr_Restore(tstate, exc, val, tb); + goto exception_unwind; + } + DISPATCH(); + } + + TARGET(CLEANUP_THROW) { + assert(throwflag); + PyObject *exc_value = TOP(); + assert(exc_value && PyExceptionInstance_Check(exc_value)); + if (PyErr_GivenExceptionMatches(exc_value, PyExc_StopIteration)) { + PyObject *value = ((PyStopIterationObject *)exc_value)->value; + Py_INCREF(value); + Py_DECREF(POP()); // The StopIteration. + Py_DECREF(POP()); // The last sent value. + Py_DECREF(POP()); // The delegated sub-iterator. + PUSH(value); + } + else { + PyObject *exc_type = Py_NewRef(Py_TYPE(exc_value)); + PyObject *exc_traceback = PyException_GetTraceback(exc_value); + _PyErr_Restore(tstate, exc_type, Py_NewRef(exc_value), exc_traceback); + goto exception_unwind; + } + DISPATCH(); + } + + TARGET(STOPITERATION_ERROR) { + assert(frame->owner == FRAME_OWNED_BY_GENERATOR); + PyObject *exc = TOP(); + assert(PyExceptionInstance_Check(exc)); + const char *msg = NULL; + if (PyErr_GivenExceptionMatches(exc, PyExc_StopIteration)) { + msg = "generator raised StopIteration"; + if (frame->f_code->co_flags & CO_ASYNC_GENERATOR) { + msg = "async generator raised StopIteration"; + } + else if (frame->f_code->co_flags & CO_COROUTINE) { + msg = "coroutine raised StopIteration"; + } + } + else if ((frame->f_code->co_flags & CO_ASYNC_GENERATOR) && + PyErr_GivenExceptionMatches(exc, PyExc_StopAsyncIteration)) + { + /* code in `gen` raised a StopAsyncIteration error: + raise a RuntimeError. 
+ */ + msg = "async generator raised StopAsyncIteration"; + } + if (msg != NULL) { + PyObject *message = _PyUnicode_FromASCII(msg, strlen(msg)); + if (message == NULL) { + goto error; + } + PyObject *error = PyObject_CallOneArg(PyExc_RuntimeError, message); + if (error == NULL) { + Py_DECREF(message); + goto error; + } + assert(PyExceptionInstance_Check(error)); + SET_TOP(error); + PyException_SetCause(error, Py_NewRef(exc)); + // Steal exc reference, rather than Py_NewRef+Py_DECREF + PyException_SetContext(error, exc); + Py_DECREF(message); + } + DISPATCH(); + } + + TARGET(LOAD_ASSERTION_ERROR) { + PyObject *value = PyExc_AssertionError; + PUSH(Py_NewRef(value)); + DISPATCH(); + } + + TARGET(LOAD_BUILD_CLASS) { + PyObject *bc; + if (PyDict_CheckExact(BUILTINS())) { + bc = _PyDict_GetItemWithError(BUILTINS(), + &_Py_ID(__build_class__)); + if (bc == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_SetString(tstate, PyExc_NameError, + "__build_class__ not found"); + } + goto error; + } + Py_INCREF(bc); + } + else { + bc = PyObject_GetItem(BUILTINS(), &_Py_ID(__build_class__)); + if (bc == NULL) { + if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) + _PyErr_SetString(tstate, PyExc_NameError, + "__build_class__ not found"); + goto error; + } + } + PUSH(bc); + DISPATCH(); + } + + TARGET(STORE_NAME) { + PyObject *name = GETITEM(names, oparg); + PyObject *v = POP(); + PyObject *ns = LOCALS(); + int err; + if (ns == NULL) { + _PyErr_Format(tstate, PyExc_SystemError, + "no locals found when storing %R", name); + Py_DECREF(v); + goto error; + } + if (PyDict_CheckExact(ns)) + err = PyDict_SetItem(ns, name, v); + else + err = PyObject_SetItem(ns, name, v); + Py_DECREF(v); + if (err != 0) + goto error; + DISPATCH(); + } + + TARGET(DELETE_NAME) { + PyObject *name = GETITEM(names, oparg); + PyObject *ns = LOCALS(); + int err; + if (ns == NULL) { + _PyErr_Format(tstate, PyExc_SystemError, + "no locals when deleting %R", name); + goto error; + } + err = PyObject_DelItem(ns, name); + // Can't use ERROR_IF here. 
+ if (err != 0) { + format_exc_check_arg(tstate, PyExc_NameError, + NAME_ERROR_MSG, + name); + goto error; + } + DISPATCH(); + } + + TARGET(UNPACK_SEQUENCE) { + PREDICTED(UNPACK_SEQUENCE); + _PyUnpackSequenceCache *cache = (_PyUnpackSequenceCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + PyObject *seq = TOP(); + next_instr -= OPSIZE; + _Py_Specialize_UnpackSequence(seq, next_instr, oparg); + DISPATCH_SAME_OPARG(); + } + STAT_INC(UNPACK_SEQUENCE, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + PyObject *seq = POP(); + PyObject **top = stack_pointer + oparg; + if (!unpack_iterable(tstate, seq, oparg, -1, top)) { + Py_DECREF(seq); + goto error; + } + STACK_GROW(oparg); + Py_DECREF(seq); + JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); + DISPATCH(); + } + + TARGET(UNPACK_SEQUENCE_TWO_TUPLE) { + PyObject *seq = TOP(); + DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); + DEOPT_IF(PyTuple_GET_SIZE(seq) != 2, UNPACK_SEQUENCE); + STAT_INC(UNPACK_SEQUENCE, hit); + SET_TOP(Py_NewRef(PyTuple_GET_ITEM(seq, 1))); + PUSH(Py_NewRef(PyTuple_GET_ITEM(seq, 0))); + Py_DECREF(seq); + JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); + DISPATCH(); + } + + TARGET(UNPACK_SEQUENCE_TUPLE) { + PyObject *seq = TOP(); + DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); + DEOPT_IF(PyTuple_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE); + STAT_INC(UNPACK_SEQUENCE, hit); + STACK_SHRINK(1); + PyObject **items = _PyTuple_ITEMS(seq); + while (oparg--) { + PUSH(Py_NewRef(items[oparg])); + } + Py_DECREF(seq); + JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); + DISPATCH(); + } + + TARGET(UNPACK_SEQUENCE_LIST) { + PyObject *seq = TOP(); + DEOPT_IF(!PyList_CheckExact(seq), UNPACK_SEQUENCE); + DEOPT_IF(PyList_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE); + STAT_INC(UNPACK_SEQUENCE, hit); + STACK_SHRINK(1); + PyObject **items = _PyList_ITEMS(seq); + while (oparg--) { + PUSH(Py_NewRef(items[oparg])); + } + Py_DECREF(seq); + JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); + DISPATCH(); + } + + TARGET(UNPACK_EX) { + int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8); + PyObject *seq = POP(); + PyObject **top = stack_pointer + totalargs; + if (!unpack_iterable(tstate, seq, oparg & 0xFF, oparg >> 8, top)) { + Py_DECREF(seq); + goto error; + } + STACK_GROW(totalargs); + Py_DECREF(seq); + DISPATCH(); + } + + TARGET(STORE_ATTR) { + PREDICTED(STORE_ATTR); + PyObject *owner = PEEK(1); + PyObject *v = PEEK(2); + uint16_t counter = read_u16(&next_instr[0].cache); + if (ADAPTIVE_COUNTER_IS_ZERO(counter)) { + assert(cframe.use_tracing == 0); + PyObject *name = GETITEM(names, oparg); + next_instr -= OPSIZE; + _Py_Specialize_StoreAttr(owner, next_instr, name); + DISPATCH_SAME_OPARG(); + } + STAT_INC(STORE_ATTR, deferred); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + PyObject *name = GETITEM(names, oparg); + int err = PyObject_SetAttr(owner, name, v); + Py_DECREF(v); + Py_DECREF(owner); + if (err) goto pop_2_error; + STACK_SHRINK(2); + JUMPBY(4); + DISPATCH(); + } + + TARGET(DELETE_ATTR) { + PyObject *owner = PEEK(1); + PyObject *name = GETITEM(names, oparg); + int err = PyObject_SetAttr(owner, name, (PyObject *)NULL); + Py_DECREF(owner); + if (err) goto pop_1_error; + STACK_SHRINK(1); + DISPATCH(); + } + + TARGET(STORE_GLOBAL) { + PyObject *v = PEEK(1); + PyObject *name = GETITEM(names, oparg); + int err = PyDict_SetItem(GLOBALS(), name, v); + Py_DECREF(v); + if (err) goto pop_1_error; + STACK_SHRINK(1); + DISPATCH(); + } + + 
TARGET(DELETE_GLOBAL) { + PyObject *name = GETITEM(names, oparg); + int err; + err = PyDict_DelItem(GLOBALS(), name); + // Can't use ERROR_IF here. + if (err != 0) { + if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + format_exc_check_arg(tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } + goto error; + } + DISPATCH(); + } + + TARGET(LOAD_NAME) { + PyObject *name = GETITEM(names, oparg); + PyObject *locals = LOCALS(); + PyObject *v; + if (locals == NULL) { + _PyErr_Format(tstate, PyExc_SystemError, + "no locals when loading %R", name); + goto error; + } + if (PyDict_CheckExact(locals)) { + v = PyDict_GetItemWithError(locals, name); + if (v != NULL) { + Py_INCREF(v); + } + else if (_PyErr_Occurred(tstate)) { + goto error; + } + } + else { + v = PyObject_GetItem(locals, name); + if (v == NULL) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) + goto error; + _PyErr_Clear(tstate); + } + } + if (v == NULL) { + v = PyDict_GetItemWithError(GLOBALS(), name); + if (v != NULL) { + Py_INCREF(v); + } + else if (_PyErr_Occurred(tstate)) { + goto error; + } + else { + if (PyDict_CheckExact(BUILTINS())) { + v = PyDict_GetItemWithError(BUILTINS(), name); + if (v == NULL) { + if (!_PyErr_Occurred(tstate)) { + format_exc_check_arg( + tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } + goto error; + } + Py_INCREF(v); + } + else { + v = PyObject_GetItem(BUILTINS(), name); + if (v == NULL) { + if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + format_exc_check_arg( + tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } + goto error; + } + } + } + } + PUSH(v); + DISPATCH(); + } + + TARGET(LOAD_GLOBAL) { + PREDICTED(LOAD_GLOBAL); + _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + PyObject *name = GETITEM(names, oparg>>1); + next_instr -= OPSIZE; + _Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name); + DISPATCH_SAME_OPARG(); + } + STAT_INC(LOAD_GLOBAL, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + int push_null = oparg & 1; + PEEK(0) = NULL; + PyObject *name = GETITEM(names, oparg>>1); + PyObject *v; + if (PyDict_CheckExact(GLOBALS()) + && PyDict_CheckExact(BUILTINS())) + { + v = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(), + (PyDictObject *)BUILTINS(), + name); + if (v == NULL) { + if (!_PyErr_Occurred(tstate)) { + /* _PyDict_LoadGlobal() returns NULL without raising + * an exception if the key doesn't exist */ + format_exc_check_arg(tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } + goto error; + } + Py_INCREF(v); + } + else { + /* Slow-path if globals or builtins is not a dict */ + + /* namespace 1: globals */ + v = PyObject_GetItem(GLOBALS(), name); + if (v == NULL) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + goto error; + } + _PyErr_Clear(tstate); + + /* namespace 2: builtins */ + v = PyObject_GetItem(BUILTINS(), name); + if (v == NULL) { + if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + format_exc_check_arg( + tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } + goto error; + } + } + } + /* Skip over inline cache */ + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_GLOBAL); + STACK_GROW(push_null); + PUSH(v); + DISPATCH(); + } + + TARGET(LOAD_GLOBAL_MODULE) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL); + PyDictObject *dict = (PyDictObject *)GLOBALS(); + _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; + uint32_t version = read_u32(cache->module_keys_version); + 
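        /* The inline cache recorded the dict-keys version and the entry
           index of this global at specialization time.  The version check
           below is what makes the cached index safe to use without a hash
           lookup: rebuilding or rearranging the keys table bumps the
           version and forces a deopt. */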
DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL); + assert(DK_IS_UNICODE(dict->ma_keys)); + PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(dict->ma_keys); + PyObject *res = entries[cache->index].me_value; + DEOPT_IF(res == NULL, LOAD_GLOBAL); + int push_null = oparg & 1; + PEEK(0) = NULL; + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_GLOBAL); + STAT_INC(LOAD_GLOBAL, hit); + STACK_GROW(push_null+1); + SET_TOP(Py_NewRef(res)); + DISPATCH(); + } + + TARGET(LOAD_GLOBAL_BUILTIN) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL); + DEOPT_IF(!PyDict_CheckExact(BUILTINS()), LOAD_GLOBAL); + PyDictObject *mdict = (PyDictObject *)GLOBALS(); + PyDictObject *bdict = (PyDictObject *)BUILTINS(); + _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; + uint32_t mod_version = read_u32(cache->module_keys_version); + uint16_t bltn_version = cache->builtin_keys_version; + DEOPT_IF(mdict->ma_keys->dk_version != mod_version, LOAD_GLOBAL); + DEOPT_IF(bdict->ma_keys->dk_version != bltn_version, LOAD_GLOBAL); + assert(DK_IS_UNICODE(bdict->ma_keys)); + PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(bdict->ma_keys); + PyObject *res = entries[cache->index].me_value; + DEOPT_IF(res == NULL, LOAD_GLOBAL); + int push_null = oparg & 1; + PEEK(0) = NULL; + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_GLOBAL); + STAT_INC(LOAD_GLOBAL, hit); + STACK_GROW(push_null+1); + SET_TOP(Py_NewRef(res)); + DISPATCH(); + } + + TARGET(DELETE_FAST) { + PyObject *v = GETLOCAL(oparg); + if (v == NULL) goto unbound_local_error; + SETLOCAL(oparg, NULL); + DISPATCH(); + } + + TARGET(MAKE_CELL) { + // "initial" is probably NULL but not if it's an arg (or set + // via PyFrame_LocalsToFast() before MAKE_CELL has run). + PyObject *initial = GETLOCAL(oparg); + PyObject *cell = PyCell_New(initial); + if (cell == NULL) { + goto resume_with_error; + } + SETLOCAL(oparg, cell); + DISPATCH(); + } + + TARGET(DELETE_DEREF) { + PyObject *cell = GETLOCAL(oparg); + PyObject *oldobj = PyCell_GET(cell); + // Can't use ERROR_IF here. + // Fortunately we don't need its superpower. 
+ if (oldobj == NULL) { + format_exc_unbound(tstate, frame->f_code, oparg); + goto error; + } + PyCell_SET(cell, NULL); + Py_DECREF(oldobj); + DISPATCH(); + } + + TARGET(LOAD_CLASSDEREF) { + PyObject *name, *value, *locals = LOCALS(); + assert(locals); + assert(oparg >= 0 && oparg < frame->f_code->co_nlocalsplus); + name = PyTuple_GET_ITEM(frame->f_code->co_localsplusnames, oparg); + if (PyDict_CheckExact(locals)) { + value = PyDict_GetItemWithError(locals, name); + if (value != NULL) { + Py_INCREF(value); + } + else if (_PyErr_Occurred(tstate)) { + goto error; + } + } + else { + value = PyObject_GetItem(locals, name); + if (value == NULL) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + goto error; + } + _PyErr_Clear(tstate); + } + } + if (!value) { + PyObject *cell = GETLOCAL(oparg); + value = PyCell_GET(cell); + if (value == NULL) { + format_exc_unbound(tstate, frame->f_code, oparg); + goto error; + } + Py_INCREF(value); + } + PUSH(value); + DISPATCH(); + } + + TARGET(LOAD_DEREF) { + PyObject *cell = GETLOCAL(oparg); + PyObject *value = PyCell_GET(cell); + if (value == NULL) { + format_exc_unbound(tstate, frame->f_code, oparg); + goto error; + } + PUSH(Py_NewRef(value)); + DISPATCH(); + } + + TARGET(STORE_DEREF) { + PyObject *v = POP(); + PyObject *cell = GETLOCAL(oparg); + PyObject *oldobj = PyCell_GET(cell); + PyCell_SET(cell, v); + Py_XDECREF(oldobj); + DISPATCH(); + } + + TARGET(COPY_FREE_VARS) { + /* Copy closure variables to free variables */ + PyCodeObject *co = frame->f_code; + assert(PyFunction_Check(frame->f_funcobj)); + PyObject *closure = ((PyFunctionObject *)frame->f_funcobj)->func_closure; + int offset = co->co_nlocals + co->co_nplaincellvars; + assert(oparg == co->co_nfreevars); + for (int i = 0; i < oparg; ++i) { + PyObject *o = PyTuple_GET_ITEM(closure, i); + frame->localsplus[offset + i] = Py_NewRef(o); + } + DISPATCH(); + } + + TARGET(BUILD_STRING) { + PyObject *str; + str = _PyUnicode_JoinArray(&_Py_STR(empty), + stack_pointer - oparg, oparg); + if (str == NULL) + goto error; + while (--oparg >= 0) { + PyObject *item = POP(); + Py_DECREF(item); + } + PUSH(str); + DISPATCH(); + } + + TARGET(BUILD_TUPLE) { + STACK_SHRINK(oparg); + PyObject *tup = _PyTuple_FromArraySteal(stack_pointer, oparg); + if (tup == NULL) + goto error; + PUSH(tup); + DISPATCH(); + } + + TARGET(BUILD_LIST) { + PyObject *list = PyList_New(oparg); + if (list == NULL) + goto error; + while (--oparg >= 0) { + PyObject *item = POP(); + PyList_SET_ITEM(list, oparg, item); + } + PUSH(list); + DISPATCH(); + } + + TARGET(LIST_TO_TUPLE) { + PyObject *list = POP(); + PyObject *tuple = PyList_AsTuple(list); + Py_DECREF(list); + if (tuple == NULL) { + goto error; + } + PUSH(tuple); + DISPATCH(); + } + + TARGET(LIST_EXTEND) { + PyObject *iterable = POP(); + PyObject *list = PEEK(oparg); + PyObject *none_val = _PyList_Extend((PyListObject *)list, iterable); + if (none_val == NULL) { + if (_PyErr_ExceptionMatches(tstate, PyExc_TypeError) && + (Py_TYPE(iterable)->tp_iter == NULL && !PySequence_Check(iterable))) + { + _PyErr_Clear(tstate); + _PyErr_Format(tstate, PyExc_TypeError, + "Value after * must be an iterable, not %.200s", + Py_TYPE(iterable)->tp_name); + } + Py_DECREF(iterable); + goto error; + } + Py_DECREF(none_val); + Py_DECREF(iterable); + DISPATCH(); + } + + TARGET(SET_UPDATE) { + PyObject *iterable = POP(); + PyObject *set = PEEK(oparg); + int err = _PySet_Update(set, iterable); + Py_DECREF(iterable); + if (err < 0) { + goto error; + } + DISPATCH(); + } + + TARGET(BUILD_SET) { + PyObject 
*set = PySet_New(NULL); + int err = 0; + int i; + if (set == NULL) + goto error; + for (i = oparg; i > 0; i--) { + PyObject *item = PEEK(i); + if (err == 0) + err = PySet_Add(set, item); + Py_DECREF(item); + } + STACK_SHRINK(oparg); + if (err != 0) { + Py_DECREF(set); + goto error; + } + PUSH(set); + DISPATCH(); + } + + TARGET(BUILD_MAP) { + PyObject *map = _PyDict_FromItems( + &PEEK(2*oparg), 2, + &PEEK(2*oparg - 1), 2, + oparg); + if (map == NULL) + goto error; + + while (oparg--) { + Py_DECREF(POP()); + Py_DECREF(POP()); + } + PUSH(map); + DISPATCH(); + } + + TARGET(SETUP_ANNOTATIONS) { + int err; + PyObject *ann_dict; + if (LOCALS() == NULL) { + _PyErr_Format(tstate, PyExc_SystemError, + "no locals found when setting up annotations"); + goto error; + } + /* check if __annotations__ in locals()... */ + if (PyDict_CheckExact(LOCALS())) { + ann_dict = _PyDict_GetItemWithError(LOCALS(), + &_Py_ID(__annotations__)); + if (ann_dict == NULL) { + if (_PyErr_Occurred(tstate)) { + goto error; + } + /* ...if not, create a new one */ + ann_dict = PyDict_New(); + if (ann_dict == NULL) { + goto error; + } + err = PyDict_SetItem(LOCALS(), &_Py_ID(__annotations__), + ann_dict); + Py_DECREF(ann_dict); + if (err != 0) { + goto error; + } + } + } + else { + /* do the same if locals() is not a dict */ + ann_dict = PyObject_GetItem(LOCALS(), &_Py_ID(__annotations__)); + if (ann_dict == NULL) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + goto error; + } + _PyErr_Clear(tstate); + ann_dict = PyDict_New(); + if (ann_dict == NULL) { + goto error; + } + err = PyObject_SetItem(LOCALS(), &_Py_ID(__annotations__), + ann_dict); + Py_DECREF(ann_dict); + if (err != 0) { + goto error; + } + } + else { + Py_DECREF(ann_dict); + } + } + DISPATCH(); + } + + TARGET(BUILD_CONST_KEY_MAP) { + PyObject *map; + PyObject *keys = TOP(); + if (!PyTuple_CheckExact(keys) || + PyTuple_GET_SIZE(keys) != (Py_ssize_t)oparg) { + _PyErr_SetString(tstate, PyExc_SystemError, + "bad BUILD_CONST_KEY_MAP keys argument"); + goto error; + } + map = _PyDict_FromItems( + &PyTuple_GET_ITEM(keys, 0), 1, + &PEEK(oparg + 1), 1, oparg); + if (map == NULL) { + goto error; + } + + Py_DECREF(POP()); + while (oparg--) { + Py_DECREF(POP()); + } + PUSH(map); + DISPATCH(); + } + + TARGET(DICT_UPDATE) { + PyObject *update = POP(); + PyObject *dict = PEEK(oparg); + if (PyDict_Update(dict, update) < 0) { + if (_PyErr_ExceptionMatches(tstate, PyExc_AttributeError)) { + _PyErr_Format(tstate, PyExc_TypeError, + "'%.200s' object is not a mapping", + Py_TYPE(update)->tp_name); + } + Py_DECREF(update); + goto error; + } + Py_DECREF(update); + DISPATCH(); + } + + TARGET(DICT_MERGE) { + PyObject *update = POP(); + PyObject *dict = PEEK(oparg); + + if (_PyDict_MergeEx(dict, update, 2) < 0) { + format_kwargs_error(tstate, PEEK(2 + oparg), update); + Py_DECREF(update); + goto error; + } + Py_DECREF(update); + PREDICT(CALL_FUNCTION_EX); + DISPATCH(); + } + + TARGET(MAP_ADD) { + PyObject *value = TOP(); + PyObject *key = SECOND(); + PyObject *map; + STACK_SHRINK(2); + map = PEEK(oparg); /* dict */ + assert(PyDict_CheckExact(map)); + /* map[key] = value */ + if (_PyDict_SetItem_Take2((PyDictObject *)map, key, value) != 0) { + goto error; + } + PREDICT(JUMP_BACKWARD); + DISPATCH(); + } + + TARGET(LOAD_ATTR) { + PREDICTED(LOAD_ATTR); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *name = GETITEM(names, oparg>>1); + next_instr -= OPSIZE; 
+ _Py_Specialize_LoadAttr(owner, next_instr, name); + DISPATCH_SAME_OPARG(); + } + STAT_INC(LOAD_ATTR, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + PyObject *name = GETITEM(names, oparg >> 1); + PyObject *owner = TOP(); + if (oparg & 1) { + /* Designed to work in tandem with CALL. */ + PyObject* meth = NULL; + + int meth_found = _PyObject_GetMethod(owner, name, &meth); + + if (meth == NULL) { + /* Most likely attribute wasn't found. */ + goto error; + } + + if (meth_found) { + /* We can bypass temporary bound method object. + meth is unbound method and obj is self. + + meth | self | arg1 | ... | argN + */ + SET_TOP(meth); + PUSH(owner); // self + } + else { + /* meth is not an unbound method (but a regular attr, or + something was returned by a descriptor protocol). Set + the second element of the stack to NULL, to signal + CALL that it's not a method call. + + NULL | meth | arg1 | ... | argN + */ + SET_TOP(NULL); + Py_DECREF(owner); + PUSH(meth); + } + } + else { + PyObject *res = PyObject_GetAttr(owner, name); + if (res == NULL) { + goto error; + } + Py_DECREF(owner); + SET_TOP(res); + } + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH(); + } + + TARGET(LOAD_ATTR_INSTANCE_VALUE) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *res; + PyTypeObject *tp = Py_TYPE(owner); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + uint32_t type_version = read_u32(cache->version); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); + assert(tp->tp_dictoffset < 0); + assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); + DEOPT_IF(!_PyDictOrValues_IsValues(dorv), LOAD_ATTR); + res = _PyDictOrValues_GetValues(dorv)->values[cache->index]; + DEOPT_IF(res == NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(res); + SET_TOP(NULL); + STACK_GROW((oparg & 1)); + SET_TOP(res); + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH(); + } + + TARGET(LOAD_ATTR_MODULE) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *res; + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + DEOPT_IF(!PyModule_CheckExact(owner), LOAD_ATTR); + PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner)->md_dict; + assert(dict != NULL); + DEOPT_IF(dict->ma_keys->dk_version != read_u32(cache->version), + LOAD_ATTR); + assert(dict->ma_keys->dk_kind == DICT_KEYS_UNICODE); + assert(cache->index < dict->ma_keys->dk_nentries); + PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + cache->index; + res = ep->me_value; + DEOPT_IF(res == NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(res); + SET_TOP(NULL); + STACK_GROW((oparg & 1)); + SET_TOP(res); + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH(); + } + + TARGET(LOAD_ATTR_WITH_HINT) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *res; + PyTypeObject *tp = Py_TYPE(owner); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + uint32_t type_version = read_u32(cache->version); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); + assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); + DEOPT_IF(_PyDictOrValues_IsValues(dorv), LOAD_ATTR); + PyDictObject *dict = (PyDictObject *)_PyDictOrValues_GetDict(dorv); + DEOPT_IF(dict == NULL, LOAD_ATTR); + assert(PyDict_CheckExact((PyObject *)dict)); + PyObject *name = GETITEM(names, oparg>>1); + 
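        /* "Hint" caching: cache->index remembers the slot where the
           attribute lived in this object's dict when the site was
           specialized.  The checks below only trust that slot if the key
           stored there is still the expected name; any mismatch deopts to
           the generic LOAD_ATTR, which performs a normal lookup. */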
uint16_t hint = cache->index; + DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, LOAD_ATTR); + if (DK_IS_UNICODE(dict->ma_keys)) { + PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint; + DEOPT_IF(ep->me_key != name, LOAD_ATTR); + res = ep->me_value; + } + else { + PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint; + DEOPT_IF(ep->me_key != name, LOAD_ATTR); + res = ep->me_value; + } + DEOPT_IF(res == NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(res); + SET_TOP(NULL); + STACK_GROW((oparg & 1)); + SET_TOP(res); + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH(); + } + + TARGET(LOAD_ATTR_SLOT) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *res; + PyTypeObject *tp = Py_TYPE(owner); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + uint32_t type_version = read_u32(cache->version); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); + char *addr = (char *)owner + cache->index; + res = *(PyObject **)addr; + DEOPT_IF(res == NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(res); + SET_TOP(NULL); + STACK_GROW((oparg & 1)); + SET_TOP(res); + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH(); + } + + TARGET(LOAD_ATTR_CLASS) { + assert(cframe.use_tracing == 0); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + + PyObject *cls = TOP(); + DEOPT_IF(!PyType_Check(cls), LOAD_ATTR); + uint32_t type_version = read_u32(cache->type_version); + DEOPT_IF(((PyTypeObject *)cls)->tp_version_tag != type_version, + LOAD_ATTR); + assert(type_version != 0); + + STAT_INC(LOAD_ATTR, hit); + PyObject *res = read_obj(cache->descr); + assert(res != NULL); + Py_INCREF(res); + SET_TOP(NULL); + STACK_GROW((oparg & 1)); + SET_TOP(res); + Py_DECREF(cls); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH(); + } + + TARGET(LOAD_ATTR_PROPERTY) { + assert(cframe.use_tracing == 0); + DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + + PyObject *owner = TOP(); + PyTypeObject *cls = Py_TYPE(owner); + uint32_t type_version = read_u32(cache->type_version); + DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR); + assert(type_version != 0); + PyObject *fget = read_obj(cache->descr); + assert(Py_IS_TYPE(fget, &PyFunction_Type)); + PyFunctionObject *f = (PyFunctionObject *)fget; + uint32_t func_version = read_u32(cache->keys_version); + assert(func_version != 0); + DEOPT_IF(f->func_version != func_version, LOAD_ATTR); + PyCodeObject *code = (PyCodeObject *)f->func_code; + assert(code->co_argcount == 1); + DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(fget); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f); + SET_TOP(NULL); + int shrink_stack = !(oparg & 1); + STACK_SHRINK(shrink_stack); + new_frame->localsplus[0] = owner; + for (int i = 1; i < code->co_nlocalsplus; i++) { + new_frame->localsplus[i] = NULL; + } + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH_INLINED(new_frame); + } + + TARGET(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN) { + assert(cframe.use_tracing == 0); + DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + PyObject *owner = TOP(); + PyTypeObject *cls = Py_TYPE(owner); + uint32_t type_version = read_u32(cache->type_version); + DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR); + assert(type_version != 0); + PyObject 
*getattribute = read_obj(cache->descr); + assert(Py_IS_TYPE(getattribute, &PyFunction_Type)); + PyFunctionObject *f = (PyFunctionObject *)getattribute; + uint32_t func_version = read_u32(cache->keys_version); + assert(func_version != 0); + DEOPT_IF(f->func_version != func_version, LOAD_ATTR); + PyCodeObject *code = (PyCodeObject *)f->func_code; + assert(code->co_argcount == 2); + DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + + PyObject *name = GETITEM(names, oparg >> 1); + Py_INCREF(f); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f); + SET_TOP(NULL); + int shrink_stack = !(oparg & 1); + STACK_SHRINK(shrink_stack); + new_frame->localsplus[0] = owner; + new_frame->localsplus[1] = Py_NewRef(name); + for (int i = 2; i < code->co_nlocalsplus; i++) { + new_frame->localsplus[i] = NULL; + } + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH_INLINED(new_frame); + } + + TARGET(STORE_ATTR_INSTANCE_VALUE) { + PyObject *owner = PEEK(1); + PyObject *value = PEEK(2); + uint32_t type_version = read_u32(&next_instr[1].cache); + uint16_t index = read_u16(&next_instr[3].cache); + assert(cframe.use_tracing == 0); + PyTypeObject *tp = Py_TYPE(owner); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); + assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); + DEOPT_IF(!_PyDictOrValues_IsValues(dorv), STORE_ATTR); + STAT_INC(STORE_ATTR, hit); + PyDictValues *values = _PyDictOrValues_GetValues(dorv); + PyObject *old_value = values->values[index]; + values->values[index] = value; + if (old_value == NULL) { + _PyDictValues_AddToInsertionOrder(values, index); + } + else { + Py_DECREF(old_value); + } + Py_DECREF(owner); + STACK_SHRINK(2); + JUMPBY(4); + DISPATCH(); + } + + TARGET(STORE_ATTR_WITH_HINT) { + PyObject *owner = PEEK(1); + PyObject *value = PEEK(2); + uint32_t type_version = read_u32(&next_instr[1].cache); + uint16_t hint = read_u16(&next_instr[3].cache); + assert(cframe.use_tracing == 0); + PyTypeObject *tp = Py_TYPE(owner); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); + assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); + DEOPT_IF(_PyDictOrValues_IsValues(dorv), STORE_ATTR); + PyDictObject *dict = (PyDictObject *)_PyDictOrValues_GetDict(dorv); + DEOPT_IF(dict == NULL, STORE_ATTR); + assert(PyDict_CheckExact((PyObject *)dict)); + PyObject *name = GETITEM(names, oparg); + DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, STORE_ATTR); + PyObject *old_value; + uint64_t new_version; + if (DK_IS_UNICODE(dict->ma_keys)) { + PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint; + DEOPT_IF(ep->me_key != name, STORE_ATTR); + old_value = ep->me_value; + DEOPT_IF(old_value == NULL, STORE_ATTR); + new_version = _PyDict_NotifyEvent(PyDict_EVENT_MODIFIED, dict, name, value); + ep->me_value = value; + } + else { + PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint; + DEOPT_IF(ep->me_key != name, STORE_ATTR); + old_value = ep->me_value; + DEOPT_IF(old_value == NULL, STORE_ATTR); + new_version = _PyDict_NotifyEvent(PyDict_EVENT_MODIFIED, dict, name, value); + ep->me_value = value; + } + Py_DECREF(old_value); + STAT_INC(STORE_ATTR, hit); + /* Ensure dict is GC tracked if it needs to be */ + if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(value)) { + _PyObject_GC_TRACK(dict); + } + /* PEP 509 */ + 
dict->ma_version_tag = new_version; + Py_DECREF(owner); + STACK_SHRINK(2); + JUMPBY(4); + DISPATCH(); + } + + TARGET(STORE_ATTR_SLOT) { + PyObject *owner = PEEK(1); + PyObject *value = PEEK(2); + uint32_t type_version = read_u32(&next_instr[1].cache); + uint16_t index = read_u16(&next_instr[3].cache); + assert(cframe.use_tracing == 0); + PyTypeObject *tp = Py_TYPE(owner); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); + char *addr = (char *)owner + index; + STAT_INC(STORE_ATTR, hit); + PyObject *old_value = *(PyObject **)addr; + *(PyObject **)addr = value; + Py_XDECREF(old_value); + Py_DECREF(owner); + STACK_SHRINK(2); + JUMPBY(4); + DISPATCH(); + } + + TARGET(COMPARE_OP) { + PREDICTED(COMPARE_OP); + PyObject *right = PEEK(1); + PyObject *left = PEEK(2); + PyObject *res; + _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + next_instr -= OPSIZE; + _Py_Specialize_CompareOp(left, right, next_instr, oparg); + DISPATCH_SAME_OPARG(); + } + STAT_INC(COMPARE_OP, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + assert(oparg <= Py_GE); + res = PyObject_RichCompare(left, right, oparg); + Py_DECREF(left); + Py_DECREF(right); + if (res == NULL) goto pop_2_error; + STACK_SHRINK(1); + POKE(1, res); + JUMPBY(2); + DISPATCH(); + } + + TARGET(COMPARE_OP_FLOAT_JUMP) { + PyObject *_tmp_1 = PEEK(1); + PyObject *_tmp_2 = PEEK(2); + { + PyObject *right = _tmp_1; + PyObject *left = _tmp_2; + size_t jump; + uint16_t when_to_jump_mask = read_u16(&next_instr[1].cache); + assert(cframe.use_tracing == 0); + // Combined: COMPARE_OP (float ? float) + POP_JUMP_IF_(true/false) + DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP); + DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP); + double dleft = PyFloat_AS_DOUBLE(left); + double dright = PyFloat_AS_DOUBLE(right); + // 1 if <, 2 if ==, 4 if >; this matches when _to_jump_mask + int sign_ish = 2*(dleft > dright) + 2 - (dleft < dright); + DEOPT_IF(isnan(dleft), COMPARE_OP); + DEOPT_IF(isnan(dright), COMPARE_OP); + STAT_INC(COMPARE_OP, hit); + _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); + _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); + jump = sign_ish & when_to_jump_mask; + _tmp_2 = (PyObject *)jump; + } + JUMPBY(2); + NEXTOPARG(); + JUMPBY(OPSIZE); + { + size_t jump = (size_t)_tmp_2; + assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE); + if (jump) { + JUMPBY(oparg); + } + } + STACK_SHRINK(2); + DISPATCH(); + } + + TARGET(COMPARE_OP_INT_JUMP) { + PyObject *_tmp_1 = PEEK(1); + PyObject *_tmp_2 = PEEK(2); + { + PyObject *right = _tmp_1; + PyObject *left = _tmp_2; + size_t jump; + uint16_t when_to_jump_mask = read_u16(&next_instr[1].cache); + assert(cframe.use_tracing == 0); + // Combined: COMPARE_OP (int ? 
int) + POP_JUMP_IF_(true/false) + DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP); + DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP); + DEOPT_IF((size_t)(Py_SIZE(left) + 1) > 2, COMPARE_OP); + DEOPT_IF((size_t)(Py_SIZE(right) + 1) > 2, COMPARE_OP); + STAT_INC(COMPARE_OP, hit); + assert(Py_ABS(Py_SIZE(left)) <= 1 && Py_ABS(Py_SIZE(right)) <= 1); + Py_ssize_t ileft = Py_SIZE(left) * ((PyLongObject *)left)->ob_digit[0]; + Py_ssize_t iright = Py_SIZE(right) * ((PyLongObject *)right)->ob_digit[0]; + // 1 if <, 2 if ==, 4 if >; this matches when _to_jump_mask + int sign_ish = 2*(ileft > iright) + 2 - (ileft < iright); + _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); + _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); + jump = sign_ish & when_to_jump_mask; + _tmp_2 = (PyObject *)jump; + } + JUMPBY(2); + NEXTOPARG(); + JUMPBY(OPSIZE); + { + size_t jump = (size_t)_tmp_2; + assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE); + if (jump) { + JUMPBY(oparg); + } + } + STACK_SHRINK(2); + DISPATCH(); + } + + TARGET(COMPARE_OP_STR_JUMP) { + PyObject *_tmp_1 = PEEK(1); + PyObject *_tmp_2 = PEEK(2); + { + PyObject *right = _tmp_1; + PyObject *left = _tmp_2; + size_t jump; + uint16_t invert = read_u16(&next_instr[1].cache); + assert(cframe.use_tracing == 0); + // Combined: COMPARE_OP (str == str or str != str) + POP_JUMP_IF_(true/false) + DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP); + DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP); + STAT_INC(COMPARE_OP, hit); + int res = _PyUnicode_Equal(left, right); + assert(oparg == Py_EQ || oparg == Py_NE); + _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc); + _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); + assert(res == 0 || res == 1); + assert(invert == 0 || invert == 1); + jump = res ^ invert; + _tmp_2 = (PyObject *)jump; + } + JUMPBY(2); + NEXTOPARG(); + JUMPBY(OPSIZE); + { + size_t jump = (size_t)_tmp_2; + assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE); + if (jump) { + JUMPBY(oparg); + } + } + STACK_SHRINK(2); + DISPATCH(); + } + + TARGET(IS_OP) { + PyObject *right = POP(); + PyObject *left = TOP(); + int res = Py_Is(left, right) ^ oparg; + PyObject *b = res ? Py_True : Py_False; + SET_TOP(Py_NewRef(b)); + Py_DECREF(left); + Py_DECREF(right); + DISPATCH(); + } + + TARGET(CONTAINS_OP) { + PyObject *right = POP(); + PyObject *left = POP(); + int res = PySequence_Contains(right, left); + Py_DECREF(left); + Py_DECREF(right); + if (res < 0) { + goto error; + } + PyObject *b = (res^oparg) ? 
Py_True : Py_False; + PUSH(Py_NewRef(b)); + DISPATCH(); + } + + TARGET(CHECK_EG_MATCH) { + PyObject *match_type = POP(); + if (check_except_star_type_valid(tstate, match_type) < 0) { + Py_DECREF(match_type); + goto error; + } + + PyObject *exc_value = TOP(); + PyObject *match = NULL, *rest = NULL; + int res = exception_group_match(exc_value, match_type, + &match, &rest); + Py_DECREF(match_type); + if (res < 0) { + goto error; + } + + if (match == NULL || rest == NULL) { + assert(match == NULL); + assert(rest == NULL); + goto error; + } + if (Py_IsNone(match)) { + PUSH(match); + Py_XDECREF(rest); + } + else { + /* Total or partial match - update the stack from + * [val] + * to + * [rest, match] + * (rest can be Py_None) + */ + + SET_TOP(rest); + PUSH(match); + PyErr_SetExcInfo(NULL, Py_NewRef(match), NULL); + Py_DECREF(exc_value); + } + DISPATCH(); + } + + TARGET(CHECK_EXC_MATCH) { + PyObject *right = POP(); + PyObject *left = TOP(); + assert(PyExceptionInstance_Check(left)); + if (check_except_type_valid(tstate, right) < 0) { + Py_DECREF(right); + goto error; + } + + int res = PyErr_GivenExceptionMatches(left, right); + Py_DECREF(right); + PUSH(Py_NewRef(res ? Py_True : Py_False)); + DISPATCH(); + } + + TARGET(IMPORT_NAME) { + PyObject *name = GETITEM(names, oparg); + PyObject *fromlist = POP(); + PyObject *level = TOP(); + PyObject *res; + res = import_name(tstate, frame, name, fromlist, level); + Py_DECREF(level); + Py_DECREF(fromlist); + SET_TOP(res); + if (res == NULL) + goto error; + DISPATCH(); + } + + TARGET(IMPORT_STAR) { + PyObject *from = POP(), *locals; + int err; + if (_PyFrame_FastToLocalsWithError(frame) < 0) { + Py_DECREF(from); + goto error; + } + + locals = LOCALS(); + if (locals == NULL) { + _PyErr_SetString(tstate, PyExc_SystemError, + "no locals found during 'import *'"); + Py_DECREF(from); + goto error; + } + err = import_all_from(tstate, locals, from); + _PyFrame_LocalsToFast(frame, 0); + Py_DECREF(from); + if (err != 0) + goto error; + DISPATCH(); + } + + TARGET(IMPORT_FROM) { + PyObject *name = GETITEM(names, oparg); + PyObject *from = TOP(); + PyObject *res; + res = import_from(tstate, from, name); + PUSH(res); + if (res == NULL) + goto error; + DISPATCH(); + } + + TARGET(JUMP_FORWARD) { + JUMPBY(oparg); + DISPATCH(); + } + + TARGET(JUMP_BACKWARD) { + PREDICTED(JUMP_BACKWARD); + assert(oparg < INSTR_OFFSET()); + JUMPBY(-oparg); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(POP_JUMP_IF_FALSE) { + PREDICTED(POP_JUMP_IF_FALSE); + PyObject *cond = POP(); + if (Py_IsTrue(cond)) { + _Py_DECREF_NO_DEALLOC(cond); + } + else if (Py_IsFalse(cond)) { + _Py_DECREF_NO_DEALLOC(cond); + JUMPBY(oparg); + } + else { + int err = PyObject_IsTrue(cond); + Py_DECREF(cond); + if (err > 0) + ; + else if (err == 0) { + JUMPBY(oparg); + } + else + goto error; + } + DISPATCH(); + } + + TARGET(POP_JUMP_IF_TRUE) { + PyObject *cond = POP(); + if (Py_IsFalse(cond)) { + _Py_DECREF_NO_DEALLOC(cond); + } + else if (Py_IsTrue(cond)) { + _Py_DECREF_NO_DEALLOC(cond); + JUMPBY(oparg); + } + else { + int err = PyObject_IsTrue(cond); + Py_DECREF(cond); + if (err > 0) { + JUMPBY(oparg); + } + else if (err == 0) + ; + else + goto error; + } + DISPATCH(); + } + + TARGET(POP_JUMP_IF_NOT_NONE) { + PyObject *value = POP(); + if (!Py_IsNone(value)) { + JUMPBY(oparg); + } + Py_DECREF(value); + DISPATCH(); + } + + TARGET(POP_JUMP_IF_NONE) { + PyObject *value = POP(); + if (Py_IsNone(value)) { + _Py_DECREF_NO_DEALLOC(value); + JUMPBY(oparg); + } + else { + Py_DECREF(value); + } + DISPATCH(); + } + + 
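Editor's note: POP_JUMP_IF_FALSE and POP_JUMP_IF_TRUE above test the boolean singletons by identity and only fall back to the general truthiness protocol for other objects; a negative result from PyObject_IsTrue() signals an error. A minimal sketch of that decision for POP_JUMP_IF_FALSE, using only the public C API (the helper name is hypothetical and not part of the patch):

    #include <Python.h>

    /* Hypothetical helper, not part of the patch: the decision POP_JUMP_IF_FALSE
       makes for an arbitrary condition object. Returns 1 if the jump would be
       taken, 0 if execution falls through, and -1 on error (exception set). */
    static int
    would_jump_if_false(PyObject *cond)
    {
        if (Py_IsTrue(cond)) {
            return 0;                     /* known truthy: fall through */
        }
        if (Py_IsFalse(cond)) {
            return 1;                     /* known falsey: take the jump */
        }
        int t = PyObject_IsTrue(cond);    /* may invoke __bool__ or __len__ */
        if (t < 0) {
            return -1;                    /* the handler goes to `error` here */
        }
        return t == 0;
    }

The real handlers additionally pop the condition and release it (using _Py_DECREF_NO_DEALLOC for the singletons, which are never deallocated); the JUMP_IF_*_OR_POP variants that follow make the same three-way distinction but leave the value on the stack when they jump.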
TARGET(JUMP_IF_FALSE_OR_POP) { + PyObject *cond = TOP(); + int err; + if (Py_IsTrue(cond)) { + STACK_SHRINK(1); + _Py_DECREF_NO_DEALLOC(cond); + } + else if (Py_IsFalse(cond)) { + JUMPBY(oparg); + } + else { + err = PyObject_IsTrue(cond); + if (err > 0) { + STACK_SHRINK(1); + Py_DECREF(cond); + } + else if (err == 0) { + JUMPBY(oparg); + } + else { + goto error; + } + } + DISPATCH(); + } + + TARGET(JUMP_IF_TRUE_OR_POP) { + PyObject *cond = TOP(); + int err; + if (Py_IsFalse(cond)) { + STACK_SHRINK(1); + _Py_DECREF_NO_DEALLOC(cond); + } + else if (Py_IsTrue(cond)) { + JUMPBY(oparg); + } + else { + err = PyObject_IsTrue(cond); + if (err > 0) { + JUMPBY(oparg); + } + else if (err == 0) { + STACK_SHRINK(1); + Py_DECREF(cond); + } + else { + goto error; + } + } + DISPATCH(); + } + + TARGET(JUMP_BACKWARD_NO_INTERRUPT) { + /* This bytecode is used in the `yield from` or `await` loop. + * If there is an interrupt, we want it handled in the innermost + * generator or coroutine, so we deliberately do not check it here. + * (see bpo-30039). + */ + JUMPBY(-oparg); + DISPATCH(); + } + + TARGET(GET_LEN) { + // PUSH(len(TOS)) + Py_ssize_t len_i = PyObject_Length(TOP()); + if (len_i < 0) { + goto error; + } + PyObject *len_o = PyLong_FromSsize_t(len_i); + if (len_o == NULL) { + goto error; + } + PUSH(len_o); + DISPATCH(); + } + + TARGET(MATCH_CLASS) { + // Pop TOS and TOS1. Set TOS to a tuple of attributes on success, or + // None on failure. + PyObject *names = POP(); + PyObject *type = POP(); + PyObject *subject = TOP(); + assert(PyTuple_CheckExact(names)); + PyObject *attrs = match_class(tstate, subject, type, oparg, names); + Py_DECREF(names); + Py_DECREF(type); + if (attrs) { + // Success! + assert(PyTuple_CheckExact(attrs)); + SET_TOP(attrs); + } + else if (_PyErr_Occurred(tstate)) { + // Error! + goto error; + } + else { + // Failure! + SET_TOP(Py_NewRef(Py_None)); + } + Py_DECREF(subject); + DISPATCH(); + } + + TARGET(MATCH_MAPPING) { + PyObject *subject = TOP(); + int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_MAPPING; + PyObject *res = match ? Py_True : Py_False; + PUSH(Py_NewRef(res)); + PREDICT(POP_JUMP_IF_FALSE); + DISPATCH(); + } + + TARGET(MATCH_SEQUENCE) { + PyObject *subject = TOP(); + int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_SEQUENCE; + PyObject *res = match ? Py_True : Py_False; + PUSH(Py_NewRef(res)); + PREDICT(POP_JUMP_IF_FALSE); + DISPATCH(); + } + + TARGET(MATCH_KEYS) { + // On successful match, PUSH(values). Otherwise, PUSH(None). + PyObject *keys = TOP(); + PyObject *subject = SECOND(); + PyObject *values_or_none = match_keys(tstate, subject, keys); + if (values_or_none == NULL) { + goto error; + } + PUSH(values_or_none); + DISPATCH(); + } + + TARGET(GET_ITER) { + /* before: [obj]; after [getiter(obj)] */ + PyObject *iterable = TOP(); + PyObject *iter = PyObject_GetIter(iterable); + Py_DECREF(iterable); + SET_TOP(iter); + if (iter == NULL) + goto error; + DISPATCH(); + } + + TARGET(GET_YIELD_FROM_ITER) { + /* before: [obj]; after [getiter(obj)] */ + PyObject *iterable = TOP(); + PyObject *iter; + if (PyCoro_CheckExact(iterable)) { + /* `iterable` is a coroutine */ + if (!(frame->f_code->co_flags & (CO_COROUTINE | CO_ITERABLE_COROUTINE))) { + /* and it is used in a 'yield from' expression of a + regular generator. 
*/ + Py_DECREF(iterable); + SET_TOP(NULL); + _PyErr_SetString(tstate, PyExc_TypeError, + "cannot 'yield from' a coroutine object " + "in a non-coroutine generator"); + goto error; + } + } + else if (!PyGen_CheckExact(iterable)) { + /* `iterable` is not a generator. */ + iter = PyObject_GetIter(iterable); + Py_DECREF(iterable); + SET_TOP(iter); + if (iter == NULL) + goto error; + } + PREDICT(LOAD_CONST); + DISPATCH(); + } + + TARGET(FOR_ITER) { + PREDICTED(FOR_ITER); + _PyForIterCache *cache = (_PyForIterCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + next_instr -= OPSIZE; + _Py_Specialize_ForIter(TOP(), next_instr, oparg); + DISPATCH_SAME_OPARG(); + } + STAT_INC(FOR_ITER, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + /* before: [iter]; after: [iter, iter()] *or* [] */ + PyObject *iter = TOP(); + PyObject *next = (*Py_TYPE(iter)->tp_iternext)(iter); + if (next != NULL) { + PUSH(next); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER); + } + else { + if (_PyErr_Occurred(tstate)) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { + goto error; + } + else if (tstate->c_tracefunc != NULL) { + call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj, tstate, frame); + } + _PyErr_Clear(tstate); + } + /* iterator ended normally */ + assert(_Py_OPCODE(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg]) == END_FOR); + STACK_SHRINK(1); + Py_DECREF(iter); + /* Skip END_FOR */ + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + OPSIZE); + } + DISPATCH(); + } + + TARGET(FOR_ITER_LIST) { + assert(cframe.use_tracing == 0); + _PyListIterObject *it = (_PyListIterObject *)TOP(); + DEOPT_IF(Py_TYPE(it) != &PyListIter_Type, FOR_ITER); + STAT_INC(FOR_ITER, hit); + PyListObject *seq = it->it_seq; + if (seq) { + if (it->it_index < PyList_GET_SIZE(seq)) { + PyObject *next = PyList_GET_ITEM(seq, it->it_index++); + PUSH(Py_NewRef(next)); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER); + goto end_for_iter_list; // End of this instruction + } + it->it_seq = NULL; + Py_DECREF(seq); + } + STACK_SHRINK(1); + Py_DECREF(it); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + OPSIZE); + end_for_iter_list: + DISPATCH(); + } + + TARGET(FOR_ITER_TUPLE) { + assert(cframe.use_tracing == 0); + _PyTupleIterObject *it = (_PyTupleIterObject *)TOP(); + DEOPT_IF(Py_TYPE(it) != &PyTupleIter_Type, FOR_ITER); + STAT_INC(FOR_ITER, hit); + PyTupleObject *seq = it->it_seq; + if (seq) { + if (it->it_index < PyTuple_GET_SIZE(seq)) { + PyObject *next = PyTuple_GET_ITEM(seq, it->it_index++); + PUSH(Py_NewRef(next)); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER); + goto end_for_iter_tuple; // End of this instruction + } + it->it_seq = NULL; + Py_DECREF(seq); + } + STACK_SHRINK(1); + Py_DECREF(it); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + OPSIZE); + end_for_iter_tuple: + DISPATCH(); + } + + TARGET(FOR_ITER_RANGE) { + assert(cframe.use_tracing == 0); + _PyRangeIterObject *r = (_PyRangeIterObject *)TOP(); + DEOPT_IF(Py_TYPE(r) != &PyRangeIter_Type, FOR_ITER); + STAT_INC(FOR_ITER, hit); + _Py_CODEUNIT next = next_instr[INLINE_CACHE_ENTRIES_FOR_ITER]; + assert(_PyOpcode_Deopt[_Py_OPCODE(next)] == STORE_FAST); + if (r->len <= 0) { + STACK_SHRINK(1); + Py_DECREF(r); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + OPSIZE); + } + else { + long value = r->start; + r->start = value + r->step; + r->len--; + if (_PyLong_AssignValue(&GETLOCAL(_Py_OPARG(next)), value) < 0) { + goto error; + } + // The STORE_FAST is already done. 
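+ // Jumping by the cache entries plus OPSIZE also skips that STORE_FAST.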
+ JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + OPSIZE); + } + DISPATCH(); + } + + TARGET(FOR_ITER_GEN) { + assert(cframe.use_tracing == 0); + PyGenObject *gen = (PyGenObject *)TOP(); + DEOPT_IF(Py_TYPE(gen) != &PyGen_Type, FOR_ITER); + DEOPT_IF(gen->gi_frame_state >= FRAME_EXECUTING, FOR_ITER); + STAT_INC(FOR_ITER, hit); + _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe; + frame->yield_offset = oparg; + _PyFrame_StackPush(gen_frame, Py_NewRef(Py_None)); + gen->gi_frame_state = FRAME_EXECUTING; + gen->gi_exc_state.previous_item = tstate->exc_info; + tstate->exc_info = &gen->gi_exc_state; + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg); + assert(_Py_OPCODE(*next_instr) == END_FOR); + DISPATCH_INLINED(gen_frame); + } + + TARGET(BEFORE_ASYNC_WITH) { + PyObject *mgr = TOP(); + PyObject *res; + PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__aenter__)); + if (enter == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_Format(tstate, PyExc_TypeError, + "'%.200s' object does not support the " + "asynchronous context manager protocol", + Py_TYPE(mgr)->tp_name); + } + goto error; + } + PyObject *exit = _PyObject_LookupSpecial(mgr, &_Py_ID(__aexit__)); + if (exit == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_Format(tstate, PyExc_TypeError, + "'%.200s' object does not support the " + "asynchronous context manager protocol " + "(missed __aexit__ method)", + Py_TYPE(mgr)->tp_name); + } + Py_DECREF(enter); + goto error; + } + SET_TOP(exit); + Py_DECREF(mgr); + res = _PyObject_CallNoArgs(enter); + Py_DECREF(enter); + if (res == NULL) + goto error; + PUSH(res); + PREDICT(GET_AWAITABLE); + DISPATCH(); + } + + TARGET(BEFORE_WITH) { + PyObject *mgr = TOP(); + PyObject *res; + PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__enter__)); + if (enter == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_Format(tstate, PyExc_TypeError, + "'%.200s' object does not support the " + "context manager protocol", + Py_TYPE(mgr)->tp_name); + } + goto error; + } + PyObject *exit = _PyObject_LookupSpecial(mgr, &_Py_ID(__exit__)); + if (exit == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_Format(tstate, PyExc_TypeError, + "'%.200s' object does not support the " + "context manager protocol " + "(missed __exit__ method)", + Py_TYPE(mgr)->tp_name); + } + Py_DECREF(enter); + goto error; + } + SET_TOP(exit); + Py_DECREF(mgr); + res = _PyObject_CallNoArgs(enter); + Py_DECREF(enter); + if (res == NULL) { + goto error; + } + PUSH(res); + DISPATCH(); + } + + TARGET(WITH_EXCEPT_START) { + PyObject *val = PEEK(1); + PyObject *lasti = PEEK(3); + PyObject *exit_func = PEEK(4); + PyObject *res; + /* At the top of the stack are 4 values: + - val: TOP = exc_info() + - unused: SECOND = previous exception + - lasti: THIRD = lasti of exception in exc_info() + - exit_func: FOURTH = the context.__exit__ bound method + We call FOURTH(type(TOP), TOP, GetTraceback(TOP)). + Then we push the __exit__ return value. 
+ */ + PyObject *exc, *tb; + + assert(val && PyExceptionInstance_Check(val)); + exc = PyExceptionInstance_Class(val); + tb = PyException_GetTraceback(val); + Py_XDECREF(tb); + assert(PyLong_Check(lasti)); + (void)lasti; // Shut up compiler warning if asserts are off + PyObject *stack[4] = {NULL, exc, val, tb}; + res = PyObject_Vectorcall(exit_func, stack + 1, + 3 | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL); + if (res == NULL) goto error; + STACK_GROW(1); + POKE(1, res); + DISPATCH(); + } + + TARGET(PUSH_EXC_INFO) { + PyObject *value = TOP(); + + _PyErr_StackItem *exc_info = tstate->exc_info; + if (exc_info->exc_value != NULL) { + SET_TOP(exc_info->exc_value); + } + else { + SET_TOP(Py_NewRef(Py_None)); + } + + PUSH(Py_NewRef(value)); + assert(PyExceptionInstance_Check(value)); + exc_info->exc_value = value; + DISPATCH(); + } + + TARGET(LOAD_ATTR_METHOD_WITH_VALUES) { + /* Cached method object */ + assert(cframe.use_tracing == 0); + PyObject *self = TOP(); + PyTypeObject *self_cls = Py_TYPE(self); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + uint32_t type_version = read_u32(cache->type_version); + assert(type_version != 0); + DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); + assert(self_cls->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(self); + DEOPT_IF(!_PyDictOrValues_IsValues(dorv), LOAD_ATTR); + PyHeapTypeObject *self_heap_type = (PyHeapTypeObject *)self_cls; + DEOPT_IF(self_heap_type->ht_cached_keys->dk_version != + read_u32(cache->keys_version), LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + PyObject *res = read_obj(cache->descr); + assert(res != NULL); + assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); + SET_TOP(Py_NewRef(res)); + PUSH(self); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH(); + } + + TARGET(LOAD_ATTR_METHOD_WITH_DICT) { + /* Can be either a managed dict, or a tp_dictoffset offset.*/ + assert(cframe.use_tracing == 0); + PyObject *self = TOP(); + PyTypeObject *self_cls = Py_TYPE(self); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + + DEOPT_IF(self_cls->tp_version_tag != read_u32(cache->type_version), + LOAD_ATTR); + /* Treat index as a signed 16 bit value */ + Py_ssize_t dictoffset = self_cls->tp_dictoffset; + assert(dictoffset > 0); + PyDictObject **dictptr = (PyDictObject**)(((char *)self)+dictoffset); + PyDictObject *dict = *dictptr; + DEOPT_IF(dict == NULL, LOAD_ATTR); + DEOPT_IF(dict->ma_keys->dk_version != read_u32(cache->keys_version), + LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + PyObject *res = read_obj(cache->descr); + assert(res != NULL); + assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); + SET_TOP(Py_NewRef(res)); + PUSH(self); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH(); + } + + TARGET(LOAD_ATTR_METHOD_NO_DICT) { + assert(cframe.use_tracing == 0); + PyObject *self = TOP(); + PyTypeObject *self_cls = Py_TYPE(self); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + uint32_t type_version = read_u32(cache->type_version); + DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); + assert(self_cls->tp_dictoffset == 0); + STAT_INC(LOAD_ATTR, hit); + PyObject *res = read_obj(cache->descr); + assert(res != NULL); + assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); + SET_TOP(Py_NewRef(res)); + PUSH(self); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH(); + } + + TARGET(LOAD_ATTR_METHOD_LAZY_DICT) { + assert(cframe.use_tracing == 0); + PyObject *self = TOP(); + 
PyTypeObject *self_cls = Py_TYPE(self); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + uint32_t type_version = read_u32(cache->type_version); + DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); + Py_ssize_t dictoffset = self_cls->tp_dictoffset; + assert(dictoffset > 0); + PyObject *dict = *(PyObject **)((char *)self + dictoffset); + /* This object has a __dict__, just not yet created */ + DEOPT_IF(dict != NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + PyObject *res = read_obj(cache->descr); + assert(res != NULL); + assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); + SET_TOP(Py_NewRef(res)); + PUSH(self); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH(); + } + + TARGET(CALL_BOUND_METHOD_EXACT_ARGS) { + DEOPT_IF(is_method(stack_pointer, oparg), CALL); + PyObject *function = PEEK(oparg + 1); + DEOPT_IF(Py_TYPE(function) != &PyMethod_Type, CALL); + STAT_INC(CALL, hit); + PyObject *self = ((PyMethodObject *)function)->im_self; + PEEK(oparg + 1) = Py_NewRef(self); + PyObject *meth = ((PyMethodObject *)function)->im_func; + PEEK(oparg + 2) = Py_NewRef(meth); + Py_DECREF(function); + GO_TO_INSTRUCTION(CALL_PY_EXACT_ARGS); + } + + TARGET(KW_NAMES) { + assert(kwnames == NULL); + assert(oparg < PyTuple_GET_SIZE(consts)); + kwnames = GETITEM(consts, oparg); + DISPATCH(); + } + + TARGET(CALL) { + PREDICTED(CALL); + _PyCallCache *cache = (_PyCallCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + int is_meth = is_method(stack_pointer, oparg); + int nargs = oparg + is_meth; + PyObject *callable = PEEK(nargs + 1); + next_instr -= OPSIZE; + _Py_Specialize_Call(callable, next_instr, nargs, kwnames); + DISPATCH_SAME_OPARG(); + } + STAT_INC(CALL, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + int total_args, is_meth; + is_meth = is_method(stack_pointer, oparg); + PyObject *function = PEEK(oparg + 1); + if (!is_meth && Py_TYPE(function) == &PyMethod_Type) { + PyObject *self = ((PyMethodObject *)function)->im_self; + PEEK(oparg+1) = Py_NewRef(self); + PyObject *meth = ((PyMethodObject *)function)->im_func; + PEEK(oparg+2) = Py_NewRef(meth); + Py_DECREF(function); + is_meth = 1; + } + total_args = oparg + is_meth; + function = PEEK(total_args + 1); + int positional_args = total_args - KWNAMES_LEN(); + // Check if the call can be inlined or not + if (Py_TYPE(function) == &PyFunction_Type && + tstate->interp->eval_frame == NULL && + ((PyFunctionObject *)function)->vectorcall == _PyFunction_Vectorcall) + { + int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(function))->co_flags; + PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(function)); + STACK_SHRINK(total_args); + _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit( + tstate, (PyFunctionObject *)function, locals, + stack_pointer, positional_args, kwnames + ); + kwnames = NULL; + STACK_SHRINK(2-is_meth); + // The frame has stolen all the arguments from the stack, + // so there is no need to clean them up. 
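+ // On success, DISPATCH_INLINED below continues execution in the
+ // callee's frame without a recursive C call into the interpreter.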
+ if (new_frame == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + DISPATCH_INLINED(new_frame); + } + /* Callable is not a normal Python function */ + PyObject *res; + if (cframe.use_tracing) { + res = trace_call_function( + tstate, function, stack_pointer-total_args, + positional_args, kwnames); + } + else { + res = PyObject_Vectorcall( + function, stack_pointer-total_args, + positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET, + kwnames); + } + kwnames = NULL; + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + Py_DECREF(function); + /* Clear the stack */ + STACK_SHRINK(total_args); + for (int i = 0; i < total_args; i++) { + Py_DECREF(stack_pointer[i]); + } + STACK_SHRINK(2-is_meth); + PUSH(res); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_PY_EXACT_ARGS) { + PREDICTED(CALL_PY_EXACT_ARGS); + assert(kwnames == NULL); + DEOPT_IF(tstate->interp->eval_frame, CALL); + _PyCallCache *cache = (_PyCallCache *)next_instr; + int is_meth = is_method(stack_pointer, oparg); + int argcount = oparg + is_meth; + PyObject *callable = PEEK(argcount + 1); + DEOPT_IF(!PyFunction_Check(callable), CALL); + PyFunctionObject *func = (PyFunctionObject *)callable; + DEOPT_IF(func->func_version != read_u32(cache->func_version), CALL); + PyCodeObject *code = (PyCodeObject *)func->func_code; + DEOPT_IF(code->co_argcount != argcount, CALL); + DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), CALL); + STAT_INC(CALL, hit); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, func); + STACK_SHRINK(argcount); + for (int i = 0; i < argcount; i++) { + new_frame->localsplus[i] = stack_pointer[i]; + } + for (int i = argcount; i < code->co_nlocalsplus; i++) { + new_frame->localsplus[i] = NULL; + } + STACK_SHRINK(2-is_meth); + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + DISPATCH_INLINED(new_frame); + } + + TARGET(CALL_PY_WITH_DEFAULTS) { + assert(kwnames == NULL); + DEOPT_IF(tstate->interp->eval_frame, CALL); + _PyCallCache *cache = (_PyCallCache *)next_instr; + int is_meth = is_method(stack_pointer, oparg); + int argcount = oparg + is_meth; + PyObject *callable = PEEK(argcount + 1); + DEOPT_IF(!PyFunction_Check(callable), CALL); + PyFunctionObject *func = (PyFunctionObject *)callable; + DEOPT_IF(func->func_version != read_u32(cache->func_version), CALL); + PyCodeObject *code = (PyCodeObject *)func->func_code; + DEOPT_IF(argcount > code->co_argcount, CALL); + int minargs = cache->min_args; + DEOPT_IF(argcount < minargs, CALL); + DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), CALL); + STAT_INC(CALL, hit); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, func); + STACK_SHRINK(argcount); + for (int i = 0; i < argcount; i++) { + new_frame->localsplus[i] = stack_pointer[i]; + } + for (int i = argcount; i < code->co_argcount; i++) { + PyObject *def = PyTuple_GET_ITEM(func->func_defaults, + i - minargs); + new_frame->localsplus[i] = Py_NewRef(def); + } + for (int i = code->co_argcount; i < code->co_nlocalsplus; i++) { + new_frame->localsplus[i] = NULL; + } + STACK_SHRINK(2-is_meth); + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + DISPATCH_INLINED(new_frame); + } + + TARGET(CALL_NO_KW_TYPE_1) { + assert(kwnames == NULL); + assert(cframe.use_tracing == 0); + assert(oparg == 1); + DEOPT_IF(is_method(stack_pointer, 1), CALL); + PyObject *obj = TOP(); + PyObject *callable = SECOND(); + DEOPT_IF(callable != (PyObject *)&PyType_Type, CALL); + STAT_INC(CALL, hit); + 
JUMPBY(INLINE_CACHE_ENTRIES_CALL); + PyObject *res = Py_NewRef(Py_TYPE(obj)); + Py_DECREF(callable); + Py_DECREF(obj); + STACK_SHRINK(2); + SET_TOP(res); + DISPATCH(); + } + + TARGET(CALL_NO_KW_STR_1) { + assert(kwnames == NULL); + assert(cframe.use_tracing == 0); + assert(oparg == 1); + DEOPT_IF(is_method(stack_pointer, 1), CALL); + PyObject *callable = PEEK(2); + DEOPT_IF(callable != (PyObject *)&PyUnicode_Type, CALL); + STAT_INC(CALL, hit); + PyObject *arg = TOP(); + PyObject *res = PyObject_Str(arg); + Py_DECREF(arg); + Py_DECREF(&PyUnicode_Type); + STACK_SHRINK(2); + SET_TOP(res); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_NO_KW_TUPLE_1) { + assert(kwnames == NULL); + assert(oparg == 1); + DEOPT_IF(is_method(stack_pointer, 1), CALL); + PyObject *callable = PEEK(2); + DEOPT_IF(callable != (PyObject *)&PyTuple_Type, CALL); + STAT_INC(CALL, hit); + PyObject *arg = TOP(); + PyObject *res = PySequence_Tuple(arg); + Py_DECREF(arg); + Py_DECREF(&PyTuple_Type); + STACK_SHRINK(2); + SET_TOP(res); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_BUILTIN_CLASS) { + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + int kwnames_len = KWNAMES_LEN(); + PyObject *callable = PEEK(total_args + 1); + DEOPT_IF(!PyType_Check(callable), CALL); + PyTypeObject *tp = (PyTypeObject *)callable; + DEOPT_IF(tp->tp_vectorcall == NULL, CALL); + STAT_INC(CALL, hit); + STACK_SHRINK(total_args); + PyObject *res = tp->tp_vectorcall((PyObject *)tp, stack_pointer, + total_args-kwnames_len, kwnames); + kwnames = NULL; + /* Free the arguments. */ + for (int i = 0; i < total_args; i++) { + Py_DECREF(stack_pointer[i]); + } + Py_DECREF(tp); + STACK_SHRINK(1-is_meth); + SET_TOP(res); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_NO_KW_BUILTIN_O) { + assert(cframe.use_tracing == 0); + /* Builtin METH_O functions */ + assert(kwnames == NULL); + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + DEOPT_IF(total_args != 1, CALL); + PyObject *callable = PEEK(total_args + 1); + DEOPT_IF(!PyCFunction_CheckExact(callable), CALL); + DEOPT_IF(PyCFunction_GET_FLAGS(callable) != METH_O, CALL); + STAT_INC(CALL, hit); + PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable); + // This is slower but CPython promises to check all non-vectorcall + // function calls. 
+ if (_Py_EnterRecursiveCallTstate(tstate, " while calling a Python object")) { + goto error; + } + PyObject *arg = TOP(); + PyObject *res = _PyCFunction_TrampolineCall(cfunc, PyCFunction_GET_SELF(callable), arg); + _Py_LeaveRecursiveCallTstate(tstate); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + + Py_DECREF(arg); + Py_DECREF(callable); + STACK_SHRINK(2-is_meth); + SET_TOP(res); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_NO_KW_BUILTIN_FAST) { + assert(cframe.use_tracing == 0); + /* Builtin METH_FASTCALL functions, without keywords */ + assert(kwnames == NULL); + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyObject *callable = PEEK(total_args + 1); + DEOPT_IF(!PyCFunction_CheckExact(callable), CALL); + DEOPT_IF(PyCFunction_GET_FLAGS(callable) != METH_FASTCALL, + CALL); + STAT_INC(CALL, hit); + PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable); + STACK_SHRINK(total_args); + /* res = func(self, args, nargs) */ + PyObject *res = ((_PyCFunctionFast)(void(*)(void))cfunc)( + PyCFunction_GET_SELF(callable), + stack_pointer, + total_args); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + + /* Free the arguments. */ + for (int i = 0; i < total_args; i++) { + Py_DECREF(stack_pointer[i]); + } + STACK_SHRINK(2-is_meth); + PUSH(res); + Py_DECREF(callable); + if (res == NULL) { + /* Not deopting because this doesn't mean our optimization was + wrong. `res` can be NULL for valid reasons. Eg. getattr(x, + 'invalid'). In those cases an exception is set, so we must + handle it. + */ + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_BUILTIN_FAST_WITH_KEYWORDS) { + assert(cframe.use_tracing == 0); + /* Builtin METH_FASTCALL | METH_KEYWORDS functions */ + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyObject *callable = PEEK(total_args + 1); + DEOPT_IF(!PyCFunction_CheckExact(callable), CALL); + DEOPT_IF(PyCFunction_GET_FLAGS(callable) != + (METH_FASTCALL | METH_KEYWORDS), CALL); + STAT_INC(CALL, hit); + STACK_SHRINK(total_args); + /* res = func(self, args, nargs, kwnames) */ + _PyCFunctionFastWithKeywords cfunc = + (_PyCFunctionFastWithKeywords)(void(*)(void)) + PyCFunction_GET_FUNCTION(callable); + PyObject *res = cfunc( + PyCFunction_GET_SELF(callable), + stack_pointer, + total_args - KWNAMES_LEN(), + kwnames + ); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + kwnames = NULL; + + /* Free the arguments. 
*/ + for (int i = 0; i < total_args; i++) { + Py_DECREF(stack_pointer[i]); + } + STACK_SHRINK(2-is_meth); + PUSH(res); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_NO_KW_LEN) { + assert(cframe.use_tracing == 0); + assert(kwnames == NULL); + /* len(o) */ + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + DEOPT_IF(total_args != 1, CALL); + PyObject *callable = PEEK(total_args + 1); + PyInterpreterState *interp = _PyInterpreterState_GET(); + DEOPT_IF(callable != interp->callable_cache.len, CALL); + STAT_INC(CALL, hit); + PyObject *arg = TOP(); + Py_ssize_t len_i = PyObject_Length(arg); + if (len_i < 0) { + goto error; + } + PyObject *res = PyLong_FromSsize_t(len_i); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + + STACK_SHRINK(2-is_meth); + SET_TOP(res); + Py_DECREF(callable); + Py_DECREF(arg); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + DISPATCH(); + } + + TARGET(CALL_NO_KW_ISINSTANCE) { + assert(cframe.use_tracing == 0); + assert(kwnames == NULL); + /* isinstance(o, o2) */ + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyObject *callable = PEEK(total_args + 1); + DEOPT_IF(total_args != 2, CALL); + PyInterpreterState *interp = _PyInterpreterState_GET(); + DEOPT_IF(callable != interp->callable_cache.isinstance, CALL); + STAT_INC(CALL, hit); + PyObject *cls = POP(); + PyObject *inst = TOP(); + int retval = PyObject_IsInstance(inst, cls); + if (retval < 0) { + Py_DECREF(cls); + goto error; + } + PyObject *res = PyBool_FromLong(retval); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + + STACK_SHRINK(2-is_meth); + SET_TOP(res); + Py_DECREF(inst); + Py_DECREF(cls); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + DISPATCH(); + } + + TARGET(CALL_NO_KW_LIST_APPEND) { + assert(cframe.use_tracing == 0); + assert(kwnames == NULL); + assert(oparg == 1); + PyObject *callable = PEEK(3); + PyInterpreterState *interp = _PyInterpreterState_GET(); + DEOPT_IF(callable != interp->callable_cache.list_append, CALL); + PyObject *list = SECOND(); + DEOPT_IF(!PyList_Check(list), CALL); + STAT_INC(CALL, hit); + PyObject *arg = POP(); + if (_PyList_AppendTakeRef((PyListObject *)list, arg) < 0) { + goto error; + } + STACK_SHRINK(2); + Py_DECREF(list); + Py_DECREF(callable); + // CALL + POP_TOP + JUMPBY(INLINE_CACHE_ENTRIES_CALL + 1); + assert(_Py_OPCODE(next_instr[-1]) == POP_TOP); + DISPATCH(); + } + + TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_O) { + assert(kwnames == NULL); + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyMethodDescrObject *callable = + (PyMethodDescrObject *)PEEK(total_args + 1); + DEOPT_IF(total_args != 2, CALL); + DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = callable->d_method; + DEOPT_IF(meth->ml_flags != METH_O, CALL); + PyObject *arg = TOP(); + PyObject *self = SECOND(); + DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); + STAT_INC(CALL, hit); + PyCFunction cfunc = meth->ml_meth; + // This is slower but CPython promises to check all non-vectorcall + // function calls. 
+ if (_Py_EnterRecursiveCallTstate(tstate, " while calling a Python object")) { + goto error; + } + PyObject *res = _PyCFunction_TrampolineCall(cfunc, self, arg); + _Py_LeaveRecursiveCallTstate(tstate); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + Py_DECREF(self); + Py_DECREF(arg); + STACK_SHRINK(oparg + 1); + SET_TOP(res); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS) { + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyMethodDescrObject *callable = + (PyMethodDescrObject *)PEEK(total_args + 1); + DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = callable->d_method; + DEOPT_IF(meth->ml_flags != (METH_FASTCALL|METH_KEYWORDS), CALL); + PyTypeObject *d_type = callable->d_common.d_type; + PyObject *self = PEEK(total_args); + DEOPT_IF(!Py_IS_TYPE(self, d_type), CALL); + STAT_INC(CALL, hit); + int nargs = total_args-1; + STACK_SHRINK(nargs); + _PyCFunctionFastWithKeywords cfunc = + (_PyCFunctionFastWithKeywords)(void(*)(void))meth->ml_meth; + PyObject *res = cfunc(self, stack_pointer, nargs - KWNAMES_LEN(), + kwnames); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + kwnames = NULL; + + /* Free the arguments. */ + for (int i = 0; i < nargs; i++) { + Py_DECREF(stack_pointer[i]); + } + Py_DECREF(self); + STACK_SHRINK(2-is_meth); + SET_TOP(res); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS) { + assert(kwnames == NULL); + assert(oparg == 0 || oparg == 1); + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + DEOPT_IF(total_args != 1, CALL); + PyMethodDescrObject *callable = (PyMethodDescrObject *)SECOND(); + DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = callable->d_method; + PyObject *self = TOP(); + DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); + DEOPT_IF(meth->ml_flags != METH_NOARGS, CALL); + STAT_INC(CALL, hit); + PyCFunction cfunc = meth->ml_meth; + // This is slower but CPython promises to check all non-vectorcall + // function calls. 
+ if (_Py_EnterRecursiveCallTstate(tstate, " while calling a Python object")) { + goto error; + } + PyObject *res = _PyCFunction_TrampolineCall(cfunc, self, NULL); + _Py_LeaveRecursiveCallTstate(tstate); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + Py_DECREF(self); + STACK_SHRINK(oparg + 1); + SET_TOP(res); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_FAST) { + assert(kwnames == NULL); + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyMethodDescrObject *callable = + (PyMethodDescrObject *)PEEK(total_args + 1); + /* Builtin METH_FASTCALL methods, without keywords */ + DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = callable->d_method; + DEOPT_IF(meth->ml_flags != METH_FASTCALL, CALL); + PyObject *self = PEEK(total_args); + DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); + STAT_INC(CALL, hit); + _PyCFunctionFast cfunc = + (_PyCFunctionFast)(void(*)(void))meth->ml_meth; + int nargs = total_args-1; + STACK_SHRINK(nargs); + PyObject *res = cfunc(self, stack_pointer, nargs); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + /* Clear the stack of the arguments. */ + for (int i = 0; i < nargs; i++) { + Py_DECREF(stack_pointer[i]); + } + Py_DECREF(self); + STACK_SHRINK(2-is_meth); + SET_TOP(res); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_FUNCTION_EX) { + PREDICTED(CALL_FUNCTION_EX); + PyObject *func, *callargs, *kwargs = NULL, *result; + if (oparg & 0x01) { + kwargs = POP(); + // DICT_MERGE is called before this opcode if there are kwargs. + // It converts all dict subtypes in kwargs into regular dicts. 
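+ // That conversion is what makes the exact-dict assert below safe.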
+ assert(PyDict_CheckExact(kwargs)); + } + callargs = POP(); + func = TOP(); + if (!PyTuple_CheckExact(callargs)) { + if (check_args_iterable(tstate, func, callargs) < 0) { + Py_DECREF(callargs); + goto error; + } + Py_SETREF(callargs, PySequence_Tuple(callargs)); + if (callargs == NULL) { + goto error; + } + } + assert(PyTuple_CheckExact(callargs)); + + result = do_call_core(tstate, func, callargs, kwargs, cframe.use_tracing); + Py_DECREF(func); + Py_DECREF(callargs); + Py_XDECREF(kwargs); + + STACK_SHRINK(1); + assert(TOP() == NULL); + SET_TOP(result); + if (result == NULL) { + goto error; + } + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(MAKE_FUNCTION) { + PyObject *codeobj = POP(); + PyFunctionObject *func = (PyFunctionObject *) + PyFunction_New(codeobj, GLOBALS()); + + Py_DECREF(codeobj); + if (func == NULL) { + goto error; + } + + if (oparg & 0x08) { + assert(PyTuple_CheckExact(TOP())); + func->func_closure = POP(); + } + if (oparg & 0x04) { + assert(PyTuple_CheckExact(TOP())); + func->func_annotations = POP(); + } + if (oparg & 0x02) { + assert(PyDict_CheckExact(TOP())); + func->func_kwdefaults = POP(); + } + if (oparg & 0x01) { + assert(PyTuple_CheckExact(TOP())); + func->func_defaults = POP(); + } + + func->func_version = ((PyCodeObject *)codeobj)->co_version; + PUSH((PyObject *)func); + DISPATCH(); + } + + TARGET(RETURN_GENERATOR) { + assert(PyFunction_Check(frame->f_funcobj)); + PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj; + PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func); + if (gen == NULL) { + goto error; + } + assert(EMPTY()); + _PyFrame_SetStackPointer(frame, stack_pointer); + _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe; + _PyFrame_Copy(frame, gen_frame); + assert(frame->frame_obj == NULL); + gen->gi_frame_state = FRAME_CREATED; + gen_frame->owner = FRAME_OWNED_BY_GENERATOR; + _Py_LeaveRecursiveCallPy(tstate); + assert(frame != &entry_frame); + _PyInterpreterFrame *prev = frame->previous; + _PyThreadState_PopFrame(tstate, frame); + frame = cframe.current_frame = prev; + _PyFrame_StackPush(frame, (PyObject *)gen); + goto resume_frame; + } + + TARGET(BUILD_SLICE) { + PyObject *start, *stop, *step, *slice; + if (oparg == 3) + step = POP(); + else + step = NULL; + stop = POP(); + start = TOP(); + slice = PySlice_New(start, stop, step); + Py_DECREF(start); + Py_DECREF(stop); + Py_XDECREF(step); + SET_TOP(slice); + if (slice == NULL) + goto error; + DISPATCH(); + } + + TARGET(FORMAT_VALUE) { + /* Handles f-string value formatting. */ + PyObject *result; + PyObject *fmt_spec; + PyObject *value; + PyObject *(*conv_fn)(PyObject *); + int which_conversion = oparg & FVC_MASK; + int have_fmt_spec = (oparg & FVS_MASK) == FVS_HAVE_SPEC; + + fmt_spec = have_fmt_spec ? POP() : NULL; + value = POP(); + + /* See if any conversion is specified. */ + switch (which_conversion) { + case FVC_NONE: conv_fn = NULL; break; + case FVC_STR: conv_fn = PyObject_Str; break; + case FVC_REPR: conv_fn = PyObject_Repr; break; + case FVC_ASCII: conv_fn = PyObject_ASCII; break; + default: + _PyErr_Format(tstate, PyExc_SystemError, + "unexpected conversion flag %d", + which_conversion); + goto error; + } + + /* If there's a conversion function, call it and replace + value with that result. Otherwise, just use value, + without conversion. 
*/ + if (conv_fn != NULL) { + result = conv_fn(value); + Py_DECREF(value); + if (result == NULL) { + Py_XDECREF(fmt_spec); + goto error; + } + value = result; + } + + /* If value is a unicode object, and there's no fmt_spec, + then we know the result of format(value) is value + itself. In that case, skip calling format(). I plan to + move this optimization in to PyObject_Format() + itself. */ + if (PyUnicode_CheckExact(value) && fmt_spec == NULL) { + /* Do nothing, just transfer ownership to result. */ + result = value; + } else { + /* Actually call format(). */ + result = PyObject_Format(value, fmt_spec); + Py_DECREF(value); + Py_XDECREF(fmt_spec); + if (result == NULL) { + goto error; + } + } + + PUSH(result); + DISPATCH(); + } + + TARGET(COPY) { + assert(oparg != 0); + PyObject *peek = PEEK(oparg); + PUSH(Py_NewRef(peek)); + DISPATCH(); + } + + TARGET(BINARY_OP) { + PREDICTED(BINARY_OP); + static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size"); + PyObject *rhs = PEEK(1); + PyObject *lhs = PEEK(2); + PyObject *res; + _PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + next_instr -= OPSIZE; + _Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, &GETLOCAL(0)); + DISPATCH_SAME_OPARG(); + } + STAT_INC(BINARY_OP, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + assert(0 <= oparg); + assert((unsigned)oparg < Py_ARRAY_LENGTH(binary_ops)); + assert(binary_ops[oparg]); + res = binary_ops[oparg](lhs, rhs); + Py_DECREF(lhs); + Py_DECREF(rhs); + if (res == NULL) goto pop_2_error; + STACK_SHRINK(1); + POKE(1, res); + JUMPBY(1); + DISPATCH(); + } + + TARGET(SWAP) { + assert(oparg != 0); + PyObject *top = TOP(); + SET_TOP(PEEK(oparg)); + PEEK(oparg) = top; + DISPATCH(); + } + + TARGET(EXTENDED_ARG) { + assert(oparg); + assert(cframe.use_tracing == 0); + opcode = _Py_OPCODE(*next_instr); + oparg = oparg << 8 | _Py_OPARG(*next_instr); + PRE_DISPATCH_GOTO(); + DISPATCH_GOTO(); + } + + TARGET(CACHE) { + Py_UNREACHABLE(); + } diff --git a/Python/getargs.c b/Python/getargs.c index f0b84b8338ddc3..0167dd753d88f7 100644 --- a/Python/getargs.c +++ b/Python/getargs.c @@ -1046,8 +1046,7 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags, /* Encode object */ if (!recode_strings && (PyBytes_Check(arg) || PyByteArray_Check(arg))) { - s = arg; - Py_INCREF(s); + s = Py_NewRef(arg); if (PyBytes_Check(arg)) { size = PyBytes_GET_SIZE(s); ptr = PyBytes_AS_STRING(s); @@ -1847,9 +1846,6 @@ vgetargskeywords(PyObject *args, PyObject *kwargs, const char *format, } -/* List of static parsers. */ -static struct _PyArg_Parser *static_arg_parsers = NULL; - static int scan_keywords(const char * const *keywords, int *ptotal, int *pposonly) { @@ -2025,8 +2021,8 @@ _parser_init(struct _PyArg_Parser *parser) parser->initialized = owned ? 
1 : -1; assert(parser->next == NULL); - parser->next = static_arg_parsers; - static_arg_parsers = parser; + parser->next = _PyRuntime.getargs.static_parsers; + _PyRuntime.getargs.static_parsers = parser; return 1; } @@ -2575,8 +2571,7 @@ _PyArg_UnpackKeywordsWithVararg(PyObject *const *args, Py_ssize_t nargs, /* copy tuple args */ for (i = 0; i < nargs; i++) { if (i >= vararg) { - Py_INCREF(args[i]); - PyTuple_SET_ITEM(buf[vararg], i - vararg, args[i]); + PyTuple_SET_ITEM(buf[vararg], i - vararg, Py_NewRef(args[i])); continue; } else { @@ -2603,7 +2598,25 @@ _PyArg_UnpackKeywordsWithVararg(PyObject *const *args, Py_ssize_t nargs, current_arg = NULL; } - buf[i + vararg + 1] = current_arg; + /* If an arguments is passed in as a keyword argument, + * it should be placed before `buf[vararg]`. + * + * For example: + * def f(a, /, b, *args): + * pass + * f(1, b=2) + * + * This `buf` array should be: [1, 2, NULL]. + * In this case, nargs < vararg. + * + * Otherwise, we leave a place at `buf[vararg]` for vararg tuple + * so the index is `i + 1`. */ + if (nargs < vararg) { + buf[i] = current_arg; + } + else { + buf[i + 1] = current_arg; + } if (current_arg) { --nkwargs; @@ -2932,14 +2945,14 @@ _PyArg_NoKwnames(const char *funcname, PyObject *kwnames) void _PyArg_Fini(void) { - struct _PyArg_Parser *tmp, *s = static_arg_parsers; + struct _PyArg_Parser *tmp, *s = _PyRuntime.getargs.static_parsers; while (s) { tmp = s->next; s->next = NULL; parser_clear(s); s = tmp; } - static_arg_parsers = NULL; + _PyRuntime.getargs.static_parsers = NULL; } #ifdef __cplusplus diff --git a/Python/getversion.c b/Python/getversion.c index 46910451fdf859..5db836ab4bfd6d 100644 --- a/Python/getversion.c +++ b/Python/getversion.c @@ -5,12 +5,23 @@ #include "patchlevel.h" -const char * -Py_GetVersion(void) +static int initialized = 0; +static char version[250]; + +void _Py_InitVersion(void) { - static char version[250]; + if (initialized) { + return; + } + initialized = 1; PyOS_snprintf(version, sizeof(version), "%.80s (%.80s) %.80s", PY_VERSION, Py_GetBuildInfo(), Py_GetCompiler()); +} + +const char * +Py_GetVersion(void) +{ + _Py_InitVersion(); return version; } diff --git a/Python/hamt.c b/Python/hamt.c index 3aa31c625824cb..8cb94641bef251 100644 --- a/Python/hamt.c +++ b/Python/hamt.c @@ -319,13 +319,6 @@ typedef struct { } PyHamtNode_Array; -typedef struct { - PyObject_VAR_HEAD - uint32_t b_bitmap; - PyObject *b_array[1]; -} PyHamtNode_Bitmap; - - typedef struct { PyObject_VAR_HEAD int32_t c_hash; @@ -333,10 +326,6 @@ typedef struct { } PyHamtNode_Collision; -static PyHamtNode_Bitmap *_empty_bitmap_node; -static PyHamtObject *_empty_hamt; - - static PyHamtObject * hamt_alloc(void); @@ -521,14 +510,16 @@ hamt_node_bitmap_new(Py_ssize_t size) PyHamtNode_Bitmap *node; Py_ssize_t i; + if (size == 0) { + /* Since bitmap nodes are immutable, we can cache the instance + for size=0 and reuse it whenever we need an empty bitmap node. 
+ */ + return (PyHamtNode *)Py_NewRef(&_Py_SINGLETON(hamt_bitmap_node_empty)); + } + assert(size >= 0); assert(size % 2 == 0); - if (size == 0 && _empty_bitmap_node != NULL) { - Py_INCREF(_empty_bitmap_node); - return (PyHamtNode *)_empty_bitmap_node; - } - /* No freelist; allocate a new bitmap node */ node = PyObject_GC_NewVar( PyHamtNode_Bitmap, &_PyHamt_BitmapNode_Type, size); @@ -546,14 +537,6 @@ hamt_node_bitmap_new(Py_ssize_t size) _PyObject_GC_TRACK(node); - if (size == 0 && _empty_bitmap_node == NULL) { - /* Since bitmap nodes are immutable, we can cache the instance - for size=0 and reuse it whenever we need an empty bitmap node. - */ - _empty_bitmap_node = node; - Py_INCREF(_empty_bitmap_node); - } - return (PyHamtNode *)node; } @@ -577,8 +560,7 @@ hamt_node_bitmap_clone(PyHamtNode_Bitmap *node) } for (i = 0; i < Py_SIZE(node); i++) { - Py_XINCREF(node->b_array[i]); - clone->b_array[i] = node->b_array[i]; + clone->b_array[i] = Py_XNewRef(node->b_array[i]); } clone->b_bitmap = node->b_bitmap; @@ -603,14 +585,12 @@ hamt_node_bitmap_clone_without(PyHamtNode_Bitmap *o, uint32_t bit) uint32_t i; for (i = 0; i < key_idx; i++) { - Py_XINCREF(o->b_array[i]); - new->b_array[i] = o->b_array[i]; + new->b_array[i] = Py_XNewRef(o->b_array[i]); } assert(Py_SIZE(o) >= 0 && Py_SIZE(o) <= 32); for (i = val_idx + 1; i < (uint32_t)Py_SIZE(o); i++) { - Py_XINCREF(o->b_array[i]); - new->b_array[i - 2] = o->b_array[i]; + new->b_array[i - 2] = Py_XNewRef(o->b_array[i]); } new->b_bitmap = o->b_bitmap & ~bit; @@ -643,15 +623,11 @@ hamt_node_new_bitmap_or_collision(uint32_t shift, return NULL; } - Py_INCREF(key1); - n->c_array[0] = key1; - Py_INCREF(val1); - n->c_array[1] = val1; + n->c_array[0] = Py_NewRef(key1); + n->c_array[1] = Py_NewRef(val1); - Py_INCREF(key2); - n->c_array[2] = key2; - Py_INCREF(val2); - n->c_array[3] = val2; + n->c_array[2] = Py_NewRef(key2); + n->c_array[3] = Py_NewRef(val2); return (PyHamtNode *)n; } @@ -736,8 +712,7 @@ hamt_node_bitmap_assoc(PyHamtNode_Bitmap *self, if (val_or_node == (PyObject *)sub_node) { Py_DECREF(sub_node); - Py_INCREF(self); - return (PyHamtNode *)self; + return (PyHamtNode *)Py_NewRef(self); } PyHamtNode_Bitmap *ret = hamt_node_bitmap_clone(self); @@ -759,8 +734,7 @@ hamt_node_bitmap_assoc(PyHamtNode_Bitmap *self, if (comp_err == 1) { /* key == key_or_null */ if (val == val_or_node) { /* we already have the same key/val pair; return self. */ - Py_INCREF(self); - return (PyHamtNode *)self; + return (PyHamtNode *)Py_NewRef(self); } /* We're setting a new value for the key we had before. @@ -769,8 +743,7 @@ hamt_node_bitmap_assoc(PyHamtNode_Bitmap *self, if (ret == NULL) { return NULL; } - Py_INCREF(val); - Py_SETREF(ret->b_array[val_idx], val); + Py_SETREF(ret->b_array[val_idx], Py_NewRef(val)); return (PyHamtNode *)ret; } @@ -865,8 +838,7 @@ hamt_node_bitmap_assoc(PyHamtNode_Bitmap *self, if (self->b_array[j] == NULL) { new_node->a_array[i] = - (PyHamtNode *)self->b_array[j + 1]; - Py_INCREF(new_node->a_array[i]); + (PyHamtNode *)Py_NewRef(self->b_array[j + 1]); } else { int32_t rehash = hamt_hash(self->b_array[j]); @@ -923,22 +895,18 @@ hamt_node_bitmap_assoc(PyHamtNode_Bitmap *self, /* Copy all keys/values that will be before the new key/value we are adding. */ for (i = 0; i < key_idx; i++) { - Py_XINCREF(self->b_array[i]); - new_node->b_array[i] = self->b_array[i]; + new_node->b_array[i] = Py_XNewRef(self->b_array[i]); } /* Set the new key/value to the new Bitmap node. 
*/ - Py_INCREF(key); - new_node->b_array[key_idx] = key; - Py_INCREF(val); - new_node->b_array[val_idx] = val; + new_node->b_array[key_idx] = Py_NewRef(key); + new_node->b_array[val_idx] = Py_NewRef(val); /* Copy all keys/values that will be after the new key/value we are adding. */ assert(Py_SIZE(self) >= 0 && Py_SIZE(self) <= 32); for (i = key_idx; i < (uint32_t)Py_SIZE(self); i++) { - Py_XINCREF(self->b_array[i]); - new_node->b_array[i + 2] = self->b_array[i]; + new_node->b_array[i + 2] = Py_XNewRef(self->b_array[i]); } new_node->b_bitmap = self->b_bitmap | bit; @@ -1019,10 +987,8 @@ hamt_node_bitmap_without(PyHamtNode_Bitmap *self, PyObject *key = sub_tree->b_array[0]; PyObject *val = sub_tree->b_array[1]; - Py_INCREF(key); - Py_XSETREF(clone->b_array[key_idx], key); - Py_INCREF(val); - Py_SETREF(clone->b_array[val_idx], val); + Py_XSETREF(clone->b_array[key_idx], Py_NewRef(key)); + Py_SETREF(clone->b_array[val_idx], Py_NewRef(val)); Py_DECREF(sub_tree); @@ -1160,6 +1126,16 @@ hamt_node_bitmap_dealloc(PyHamtNode_Bitmap *self) Py_ssize_t len = Py_SIZE(self); Py_ssize_t i; + if (Py_SIZE(self) == 0) { + /* The empty node is statically allocated. */ + assert(self == &_Py_SINGLETON(hamt_bitmap_node_empty)); +#ifdef Py_DEBUG + _Py_FatalRefcountError("deallocating the empty hamt node bitmap singleton"); +#else + return; +#endif + } + PyObject_GC_UnTrack(self); Py_TRASHCAN_BEGIN(self, hamt_node_bitmap_dealloc) @@ -1343,14 +1319,11 @@ hamt_node_collision_assoc(PyHamtNode_Collision *self, } for (i = 0; i < Py_SIZE(self); i++) { - Py_INCREF(self->c_array[i]); - new_node->c_array[i] = self->c_array[i]; + new_node->c_array[i] = Py_NewRef(self->c_array[i]); } - Py_INCREF(key); - new_node->c_array[i] = key; - Py_INCREF(val); - new_node->c_array[i + 1] = val; + new_node->c_array[i] = Py_NewRef(key); + new_node->c_array[i + 1] = Py_NewRef(val); *added_leaf = 1; return (PyHamtNode *)new_node; @@ -1364,8 +1337,7 @@ hamt_node_collision_assoc(PyHamtNode_Collision *self, if (self->c_array[val_idx] == val) { /* We're setting a key/value pair that's already set. */ - Py_INCREF(self); - return (PyHamtNode *)self; + return (PyHamtNode *)Py_NewRef(self); } /* We need to replace old value for the key @@ -1378,14 +1350,11 @@ hamt_node_collision_assoc(PyHamtNode_Collision *self, /* Copy all elements of the old node to the new one. */ for (i = 0; i < Py_SIZE(self); i++) { - Py_INCREF(self->c_array[i]); - new_node->c_array[i] = self->c_array[i]; + new_node->c_array[i] = Py_NewRef(self->c_array[i]); } /* Replace the old value with the new value for the our key. 
*/ - Py_DECREF(new_node->c_array[val_idx]); - Py_INCREF(val); - new_node->c_array[val_idx] = val; + Py_SETREF(new_node->c_array[val_idx], Py_NewRef(val)); return (PyHamtNode *)new_node; @@ -1410,8 +1379,7 @@ hamt_node_collision_assoc(PyHamtNode_Collision *self, return NULL; } new_node->b_bitmap = hamt_bitpos(self->c_hash, shift); - Py_INCREF(self); - new_node->b_array[1] = (PyObject*) self; + new_node->b_array[1] = Py_NewRef(self); assoc_res = hamt_node_bitmap_assoc( new_node, shift, hash, key, val, added_leaf); @@ -1473,17 +1441,13 @@ hamt_node_collision_without(PyHamtNode_Collision *self, } if (key_idx == 0) { - Py_INCREF(self->c_array[2]); - node->b_array[0] = self->c_array[2]; - Py_INCREF(self->c_array[3]); - node->b_array[1] = self->c_array[3]; + node->b_array[0] = Py_NewRef(self->c_array[2]); + node->b_array[1] = Py_NewRef(self->c_array[3]); } else { assert(key_idx == 2); - Py_INCREF(self->c_array[0]); - node->b_array[0] = self->c_array[0]; - Py_INCREF(self->c_array[1]); - node->b_array[1] = self->c_array[1]; + node->b_array[0] = Py_NewRef(self->c_array[0]); + node->b_array[1] = Py_NewRef(self->c_array[1]); } node->b_bitmap = hamt_bitpos(hash, shift); @@ -1504,12 +1468,10 @@ hamt_node_collision_without(PyHamtNode_Collision *self, /* Copy all other keys from `self` to `new` */ Py_ssize_t i; for (i = 0; i < key_idx; i++) { - Py_INCREF(self->c_array[i]); - new->c_array[i] = self->c_array[i]; + new->c_array[i] = Py_NewRef(self->c_array[i]); } for (i = key_idx + 2; i < Py_SIZE(self); i++) { - Py_INCREF(self->c_array[i]); - new->c_array[i - 2] = self->c_array[i]; + new->c_array[i - 2] = Py_NewRef(self->c_array[i]); } *new_node = (PyHamtNode*)new; @@ -1661,8 +1623,7 @@ hamt_node_array_clone(PyHamtNode_Array *node) /* Copy all elements from the current Array node to the new one. */ for (i = 0; i < HAMT_ARRAY_NODE_SIZE; i++) { - Py_XINCREF(node->a_array[i]); - clone->a_array[i] = node->a_array[i]; + clone->a_array[i] = (PyHamtNode*)Py_XNewRef(node->a_array[i]); } VALIDATE_ARRAY_NODE(clone) @@ -1719,8 +1680,7 @@ hamt_node_array_assoc(PyHamtNode_Array *self, /* Copy all elements from the current Array node to the new one. 
*/ for (i = 0; i < HAMT_ARRAY_NODE_SIZE; i++) { - Py_XINCREF(self->a_array[i]); - new_node->a_array[i] = self->a_array[i]; + new_node->a_array[i] = (PyHamtNode*)Py_XNewRef(self->a_array[i]); } assert(new_node->a_array[idx] == NULL); @@ -1868,15 +1828,12 @@ hamt_node_array_without(PyHamtNode_Array *self, PyObject *key = child->b_array[0]; PyObject *val = child->b_array[1]; - Py_INCREF(key); - new->b_array[new_i] = key; - Py_INCREF(val); - new->b_array[new_i + 1] = val; + new->b_array[new_i] = Py_NewRef(key); + new->b_array[new_i + 1] = Py_NewRef(val); } else { new->b_array[new_i] = NULL; - Py_INCREF(node); - new->b_array[new_i + 1] = (PyObject*)node; + new->b_array[new_i + 1] = Py_NewRef(node); } } else { @@ -1894,8 +1851,7 @@ hamt_node_array_without(PyHamtNode_Array *self, /* Just copy the node into our new Bitmap */ new->b_array[new_i] = NULL; - Py_INCREF(node); - new->b_array[new_i + 1] = (PyObject*)node; + new->b_array[new_i + 1] = Py_NewRef(node); } new_i += 2; @@ -2311,8 +2267,7 @@ _PyHamt_Assoc(PyHamtObject *o, PyObject *key, PyObject *val) if (new_root == o->h_root) { Py_DECREF(new_root); - Py_INCREF(o); - return o; + return (PyHamtObject*)Py_NewRef(o); } new_o = hamt_alloc(); @@ -2348,8 +2303,7 @@ _PyHamt_Without(PyHamtObject *o, PyObject *key) case W_EMPTY: return _PyHamt_New(); case W_NOT_FOUND: - Py_INCREF(o); - return o; + return (PyHamtObject*)Py_NewRef(o); case W_NEWNODE: { assert(new_root != NULL); @@ -2470,35 +2424,15 @@ hamt_alloc(void) return o; } +#define _empty_hamt \ + (&_Py_INTERP_SINGLETON(_PyInterpreterState_Get(), hamt_empty)) + PyHamtObject * _PyHamt_New(void) { - if (_empty_hamt != NULL) { - /* HAMT is an immutable object so we can easily cache an - empty instance. */ - Py_INCREF(_empty_hamt); - return _empty_hamt; - } - - PyHamtObject *o = hamt_alloc(); - if (o == NULL) { - return NULL; - } - - o->h_root = hamt_node_bitmap_new(0); - if (o->h_root == NULL) { - Py_DECREF(o); - return NULL; - } - - o->h_count = 0; - - if (_empty_hamt == NULL) { - Py_INCREF(o); - _empty_hamt = o; - } - - return o; + /* HAMT is an immutable object so we can easily cache an + empty instance. */ + return (PyHamtObject*)Py_NewRef(_empty_hamt); } #ifdef Py_DEBUG @@ -2591,8 +2525,7 @@ hamt_baseiter_new(PyTypeObject *type, binaryfunc yield, PyHamtObject *o) return NULL; } - Py_INCREF(o); - it->hi_obj = o; + it->hi_obj = (PyHamtObject*)Py_NewRef(o); it->hi_yield = yield; hamt_iterator_init(&it->hi_iter, o->h_root); @@ -2648,8 +2581,7 @@ PyTypeObject _PyHamtKeys_Type = { static PyObject * hamt_iter_yield_keys(PyObject *key, PyObject *val) { - Py_INCREF(key); - return key; + return Py_NewRef(key); } PyObject * @@ -2672,8 +2604,7 @@ PyTypeObject _PyHamtValues_Type = { static PyObject * hamt_iter_yield_values(PyObject *key, PyObject *val) { - Py_INCREF(val); - return val; + return Py_NewRef(val); } PyObject * @@ -2717,6 +2648,15 @@ hamt_tp_traverse(PyHamtObject *self, visitproc visit, void *arg) static void hamt_tp_dealloc(PyHamtObject *self) { + if (self == _empty_hamt) { + /* The empty one is statically allocated. 
*/ +#ifdef Py_DEBUG + _Py_FatalRefcountError("deallocating the empty hamt singleton"); +#else + return; +#endif + } + PyObject_GC_UnTrack(self); if (self->h_weakreflist != NULL) { PyObject_ClearWeakRefs((PyObject*)self); @@ -2766,8 +2706,7 @@ hamt_tp_subscript(PyHamtObject *self, PyObject *key) case F_ERROR: return NULL; case F_FOUND: - Py_INCREF(val); - return val; + return Py_NewRef(val); case F_NOT_FOUND: PyErr_SetObject(PyExc_KeyError, key); return NULL; @@ -2817,14 +2756,12 @@ hamt_py_get(PyHamtObject *self, PyObject *args) case F_ERROR: return NULL; case F_FOUND: - Py_INCREF(val); - return val; + return Py_NewRef(val); case F_NOT_FOUND: if (def == NULL) { Py_RETURN_NONE; } - Py_INCREF(def); - return def; + return Py_NewRef(def); default: Py_UNREACHABLE(); } @@ -2955,11 +2892,3 @@ PyTypeObject _PyHamt_CollisionNode_Type = { .tp_free = PyObject_GC_Del, .tp_hash = PyObject_HashNotImplemented, }; - - -void -_PyHamt_Fini(PyInterpreterState *interp) -{ - Py_CLEAR(_empty_hamt); - Py_CLEAR(_empty_bitmap_node); -} diff --git a/Python/import.c b/Python/import.c index 9d35d261774211..da6c15c5fd4144 100644 --- a/Python/import.c +++ b/Python/import.c @@ -27,13 +27,14 @@ extern "C" { /* Forward references */ static PyObject *import_add_module(PyThreadState *tstate, PyObject *name); -/* See _PyImport_FixupExtensionObject() below */ -static PyObject *extensions = NULL; - /* This table is defined in config.c: */ extern struct _inittab _PyImport_Inittab[]; +// This is not used after Py_Initialize() is called. +// (See _PyRuntimeState.imports.inittab.) struct _inittab *PyImport_Inittab = _PyImport_Inittab; +// When we dynamically allocate a larger table for PyImport_ExtendInittab(), +// we track the pointer here so we can deallocate it during finalization. static struct _inittab *inittab_copy = NULL; /*[clinic input] @@ -93,9 +94,9 @@ _PyImportZip_Init(PyThreadState *tstate) in different threads to return with a partially loaded module. These calls are serialized by the global interpreter lock. */ -static PyThread_type_lock import_lock = NULL; -static unsigned long import_lock_thread = PYTHREAD_INVALID_THREAD_ID; -static int import_lock_level = 0; +#define import_lock _PyRuntime.imports.lock.mutex +#define import_lock_thread _PyRuntime.imports.lock.thread +#define import_lock_level _PyRuntime.imports.lock.level void _PyImport_AcquireLock(void) @@ -221,14 +222,59 @@ _imp_release_lock_impl(PyObject *module) Py_RETURN_NONE; } +PyStatus +_PyImport_Init(void) +{ + if (_PyRuntime.imports.inittab != NULL) { + return _PyStatus_ERR("global import state already initialized"); + } + PyStatus status = _PyStatus_OK(); + + size_t size; + for (size = 0; PyImport_Inittab[size].name != NULL; size++) + ; + size++; + + /* Force default raw memory allocator to get a known allocator to be able + to release the memory in _PyImport_Fini() */ + PyMemAllocatorEx old_alloc; + _PyMem_SetDefaultAllocator(PYMEM_DOMAIN_RAW, &old_alloc); + + /* Make the copy. 
*/ + struct _inittab *copied = PyMem_RawMalloc(size * sizeof(struct _inittab)); + if (copied == NULL) { + status = PyStatus_NoMemory(); + goto done; + } + memcpy(copied, PyImport_Inittab, size * sizeof(struct _inittab)); + _PyRuntime.imports.inittab = copied; + +done: + PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &old_alloc); + return status; +} + +static inline void _extensions_cache_clear(void); + void _PyImport_Fini(void) { - Py_CLEAR(extensions); + _extensions_cache_clear(); if (import_lock != NULL) { PyThread_free_lock(import_lock); import_lock = NULL; } + + /* Use the same memory allocator as _PyImport_Init(). */ + PyMemAllocatorEx old_alloc; + _PyMem_SetDefaultAllocator(PYMEM_DOMAIN_RAW, &old_alloc); + + /* Free memory allocated by _PyImport_Init() */ + struct _inittab *inittab = _PyRuntime.imports.inittab; + _PyRuntime.imports.inittab = NULL; + PyMem_RawFree(inittab); + + PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &old_alloc); } void @@ -398,6 +444,51 @@ PyImport_GetMagicTag(void) dictionary, to avoid loading shared libraries twice. */ +static PyModuleDef * +_extensions_cache_get(PyObject *filename, PyObject *name) +{ + PyObject *extensions = _PyRuntime.imports.extensions; + if (extensions == NULL) { + return NULL; + } + PyObject *key = PyTuple_Pack(2, filename, name); + if (key == NULL) { + return NULL; + } + PyModuleDef *def = (PyModuleDef *)PyDict_GetItemWithError(extensions, key); + Py_DECREF(key); + return def; +} + +static int +_extensions_cache_set(PyObject *filename, PyObject *name, PyModuleDef *def) +{ + PyObject *extensions = _PyRuntime.imports.extensions; + if (extensions == NULL) { + extensions = PyDict_New(); + if (extensions == NULL) { + return -1; + } + _PyRuntime.imports.extensions = extensions; + } + PyObject *key = PyTuple_Pack(2, filename, name); + if (key == NULL) { + return -1; + } + int res = PyDict_SetItem(extensions, key, (PyObject *)def); + Py_DECREF(key); + if (res < 0) { + return -1; + } + return 0; +} + +static void +_extensions_cache_clear(void) +{ + Py_CLEAR(_PyRuntime.imports.extensions); +} + int _PyImport_FixupExtensionObject(PyObject *mod, PyObject *name, PyObject *filename, PyObject *modules) @@ -442,20 +533,7 @@ _PyImport_FixupExtensionObject(PyObject *mod, PyObject *name, } } - if (extensions == NULL) { - extensions = PyDict_New(); - if (extensions == NULL) { - return -1; - } - } - - PyObject *key = PyTuple_Pack(2, filename, name); - if (key == NULL) { - return -1; - } - int res = PyDict_SetItem(extensions, key, (PyObject *)def); - Py_DECREF(key); - if (res < 0) { + if (_extensions_cache_set(filename, name, def) < 0) { return -1; } } @@ -480,16 +558,7 @@ static PyObject * import_find_extension(PyThreadState *tstate, PyObject *name, PyObject *filename) { - if (extensions == NULL) { - return NULL; - } - - PyObject *key = PyTuple_Pack(2, filename, name); - if (key == NULL) { - return NULL; - } - PyModuleDef* def = (PyModuleDef *)PyDict_GetItemWithError(extensions, key); - Py_DECREF(key); + PyModuleDef *def = _extensions_cache_get(filename, name); if (def == NULL) { return NULL; } @@ -556,8 +625,7 @@ import_add_module(PyThreadState *tstate, PyObject *name) PyObject *m; if (PyDict_CheckExact(modules)) { - m = PyDict_GetItemWithError(modules, name); - Py_XINCREF(m); + m = Py_XNewRef(PyDict_GetItemWithError(modules, name)); } else { m = PyObject_GetItem(modules, name); @@ -810,8 +878,7 @@ update_code_filenames(PyCodeObject *co, PyObject *oldname, PyObject *newname) if (PyUnicode_Compare(co->co_filename, oldname)) return; - Py_INCREF(newname); - 
Py_XSETREF(co->co_filename, newname); + Py_XSETREF(co->co_filename, Py_NewRef(newname)); constants = co->co_consts; n = PyTuple_GET_SIZE(constants); @@ -868,9 +935,10 @@ static int is_builtin(PyObject *name) { int i; - for (i = 0; PyImport_Inittab[i].name != NULL; i++) { - if (_PyUnicode_EqualToASCIIString(name, PyImport_Inittab[i].name)) { - if (PyImport_Inittab[i].initfunc == NULL) + struct _inittab *inittab = _PyRuntime.imports.inittab; + for (i = 0; inittab[i].name != NULL; i++) { + if (_PyUnicode_EqualToASCIIString(name, inittab[i].name)) { + if (inittab[i].initfunc == NULL) return -1; else return 1; @@ -905,8 +973,7 @@ get_path_importer(PyThreadState *tstate, PyObject *path_importer_cache, importer = PyDict_GetItemWithError(path_importer_cache, p); if (importer != NULL || _PyErr_Occurred(tstate)) { - Py_XINCREF(importer); - return importer; + return Py_XNewRef(importer); } /* set path_importer_cache[p] to None to avoid recursion */ @@ -964,11 +1031,12 @@ create_builtin(PyThreadState *tstate, PyObject *name, PyObject *spec) } PyObject *modules = tstate->interp->modules; - for (struct _inittab *p = PyImport_Inittab; p->name != NULL; p++) { + for (struct _inittab *p = _PyRuntime.imports.inittab; p->name != NULL; p++) { if (_PyUnicode_EqualToASCIIString(name, p->name)) { if (p->initfunc == NULL) { /* Cannot re-init internal module ("sys" or "builtins") */ - return PyImport_AddModuleObject(name); + mod = PyImport_AddModuleObject(name); + return Py_XNewRef(mod); } mod = _PyImport_InitFunc_TrampolineCall(*p->initfunc); if (mod == NULL) { @@ -1403,8 +1471,7 @@ PyImport_ImportFrozenModuleObject(PyObject *name) } } else { - Py_INCREF(Py_None); - origname = Py_None; + origname = Py_NewRef(Py_None); } err = PyDict_SetItemString(d, "__origname__", origname); Py_DECREF(origname); @@ -1516,8 +1583,7 @@ remove_importlib_frames(PyThreadState *tstate) if (in_importlib && (always_trim || _PyUnicode_EqualToASCIIString(code->co_name, remove_frames))) { - Py_XINCREF(next); - Py_XSETREF(*outer_link, next); + Py_XSETREF(*outer_link, Py_XNewRef(next)); prev_link = outer_link; } else { @@ -1693,8 +1759,8 @@ import_find_and_load(PyThreadState *tstate, PyObject *abs_name) PyObject *mod = NULL; PyInterpreterState *interp = tstate->interp; int import_time = _PyInterpreterState_GetConfig(interp)->import_time; - static int import_level; - static _PyTime_t accumulated; +#define import_level _PyRuntime.imports.find_and_load.import_level +#define accumulated _PyRuntime.imports.find_and_load.accumulated _PyTime_t t1 = 0, accumulated_copy = accumulated; @@ -1715,12 +1781,13 @@ import_find_and_load(PyThreadState *tstate, PyObject *abs_name) * _PyDict_GetItemIdWithError(). 
*/ if (import_time) { - static int header = 1; +#define header _PyRuntime.imports.find_and_load.header if (header) { fputs("import time: self [us] | cumulative | imported package\n", stderr); header = 0; } +#undef header import_level++; t1 = _PyTime_GetPerfCounter(); @@ -1750,6 +1817,8 @@ import_find_and_load(PyThreadState *tstate, PyObject *abs_name) } return mod; +#undef import_level +#undef accumulated } PyObject * @@ -1813,8 +1882,7 @@ PyImport_ImportModuleLevelObject(PyObject *name, PyObject *globals, _PyErr_SetString(tstate, PyExc_ValueError, "Empty module name"); goto error; } - abs_name = name; - Py_INCREF(abs_name); + abs_name = Py_NewRef(name); } mod = import_get_module(tstate, abs_name); @@ -1853,8 +1921,7 @@ PyImport_ImportModuleLevelObject(PyObject *name, PyObject *globals, if (dot == -1) { /* No dot in module name, simple exit */ - final_mod = mod; - Py_INCREF(mod); + final_mod = Py_NewRef(mod); goto error; } @@ -1889,8 +1956,7 @@ PyImport_ImportModuleLevelObject(PyObject *name, PyObject *globals, } } else { - final_mod = mod; - Py_INCREF(mod); + final_mod = Py_NewRef(mod); } } else { @@ -1905,8 +1971,7 @@ PyImport_ImportModuleLevelObject(PyObject *name, PyObject *globals, mod, fromlist, interp->import_func, NULL); } else { - final_mod = mod; - Py_INCREF(mod); + final_mod = Py_NewRef(mod); } } @@ -2578,6 +2643,10 @@ PyImport_ExtendInittab(struct _inittab *newtab) size_t i, n; int res = 0; + if (_PyRuntime.imports.inittab != NULL) { + Py_FatalError("PyImport_ExtendInittab() may be be called after Py_Initialize()"); + } + /* Count the number of entries in both tables */ for (n = 0; newtab[n].name != NULL; n++) ; @@ -2622,6 +2691,10 @@ PyImport_AppendInittab(const char *name, PyObject* (*initfunc)(void)) { struct _inittab newtab[2]; + if (_PyRuntime.imports.inittab != NULL) { + Py_FatalError("PyImport_AppendInittab() may be be called after Py_Initialize()"); + } + memset(newtab, '\0', sizeof newtab); newtab[0].name = name; diff --git a/Python/importdl.c b/Python/importdl.c index 870ae2730071bb..40227674ca47ee 100644 --- a/Python/importdl.c +++ b/Python/importdl.c @@ -160,6 +160,7 @@ _PyImport_LoadDynamicModuleWithSpec(PyObject *spec, FILE *fp) p0 = (PyModInitFunction)exportfunc; /* Package context is needed for single-phase init */ +#define _Py_PackageContext (_PyRuntime.imports.pkgcontext) oldcontext = _Py_PackageContext; _Py_PackageContext = PyUnicode_AsUTF8(name_unicode); if (_Py_PackageContext == NULL) { @@ -168,6 +169,7 @@ _PyImport_LoadDynamicModuleWithSpec(PyObject *spec, FILE *fp) } m = _PyImport_InitFunc_TrampolineCall(p0); _Py_PackageContext = oldcontext; +#undef _Py_PackageContext if (m == NULL) { if (!PyErr_Occurred()) { diff --git a/Python/initconfig.c b/Python/initconfig.c index 4b784290b014d2..d05099cd997790 100644 --- a/Python/initconfig.c +++ b/Python/initconfig.c @@ -129,7 +129,14 @@ The following implementation-specific options are available:\n\ \n\ -X int_max_str_digits=number: limit the size of int<->str conversions.\n\ This helps avoid denial of service attacks when parsing untrusted data.\n\ - The default is sys.int_info.default_max_str_digits. 0 disables."; + The default is sys.int_info.default_max_str_digits. 0 disables." + +#ifdef Py_STATS +"\n\ +\n\ +-X pystats: Enable pystats collection at startup." +#endif +; /* Envvars that don't have equivalent command-line options are listed first */ static const char usage_envvars[] = @@ -241,7 +248,7 @@ _Py_COMP_DIAG_IGNORE_DEPR_DECLS #define FROM_STRING(STR) \ ((STR != NULL) ? 
\ PyUnicode_FromString(STR) \ - : (Py_INCREF(Py_None), Py_None)) + : Py_NewRef(Py_None)) #define SET_ITEM_STR(VAR) \ SET_ITEM(#VAR, FROM_STRING(VAR)) @@ -595,17 +602,13 @@ _Py_ClearStandardStreamEncoding(void) /* --- Py_GetArgcArgv() ------------------------------------------- */ -/* For Py_GetArgcArgv(); set by _Py_SetArgcArgv() */ -static PyWideStringList orig_argv = {.length = 0, .items = NULL}; - - void _Py_ClearArgcArgv(void) { PyMemAllocatorEx old_alloc; _PyMem_SetDefaultAllocator(PYMEM_DOMAIN_RAW, &old_alloc); - _PyWideStringList_Clear(&orig_argv); + _PyWideStringList_Clear(&_PyRuntime.orig_argv); PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &old_alloc); } @@ -620,7 +623,9 @@ _Py_SetArgcArgv(Py_ssize_t argc, wchar_t * const *argv) PyMemAllocatorEx old_alloc; _PyMem_SetDefaultAllocator(PYMEM_DOMAIN_RAW, &old_alloc); - res = _PyWideStringList_Copy(&orig_argv, &argv_list); + // XXX _PyRuntime.orig_argv only gets cleared by Py_Main(), + // so it it currently leaks for embedders. + res = _PyWideStringList_Copy(&_PyRuntime.orig_argv, &argv_list); PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &old_alloc); return res; @@ -631,8 +636,8 @@ _Py_SetArgcArgv(Py_ssize_t argc, wchar_t * const *argv) void Py_GetArgcArgv(int *argc, wchar_t ***argv) { - *argc = (int)orig_argv.length; - *argv = orig_argv.items; + *argc = (int)_PyRuntime.orig_argv.length; + *argv = _PyRuntime.orig_argv.items; } @@ -1054,7 +1059,7 @@ _PyConfig_AsDict(const PyConfig *config) #define FROM_WSTRING(STR) \ ((STR != NULL) ? \ PyUnicode_FromWideChar(STR, -1) \ - : (Py_INCREF(Py_None), Py_None)) + : Py_NewRef(Py_None)) #define SET_ITEM_WSTR(ATTR) \ SET_ITEM(#ATTR, FROM_WSTRING(config->ATTR)) #define SET_ITEM_WSTRLIST(LIST) \ @@ -2188,6 +2193,12 @@ config_read(PyConfig *config, int compute_path_config) config->show_ref_count = 1; } +#ifdef Py_STATS + if (config_get_xoption(config, L"pystats")) { + _py_stats = &_py_stats_struct; + } +#endif + status = config_read_complex_options(config); if (_PyStatus_EXCEPTION(status)) { return status; diff --git a/Python/marshal.c b/Python/marshal.c index 90a44050918006..5f392d9e1ecfff 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -34,6 +34,8 @@ module marshal */ #if defined(MS_WINDOWS) #define MAX_MARSHAL_STACK_DEPTH 1000 +#elif defined(__wasi__) +#define MAX_MARSHAL_STACK_DEPTH 1500 #else #define MAX_MARSHAL_STACK_DEPTH 2000 #endif @@ -324,8 +326,8 @@ w_ref(PyObject *v, char *flag, WFILE *p) goto err; } w = (int)s; - Py_INCREF(v); - if (_Py_hashtable_set(p->hashtable, v, (void *)(uintptr_t)w) < 0) { + if (_Py_hashtable_set(p->hashtable, Py_NewRef(v), + (void *)(uintptr_t)w) < 0) { Py_DECREF(v); goto err; } @@ -949,8 +951,7 @@ r_ref_insert(PyObject *o, Py_ssize_t idx, int flag, RFILE *p) { if (o != NULL && flag) { /* currently only FLAG_REF is defined */ PyObject *tmp = PyList_GET_ITEM(p->refs, idx); - Py_INCREF(o); - PyList_SET_ITEM(p->refs, idx, o); + PyList_SET_ITEM(p->refs, idx, Py_NewRef(o)); Py_DECREF(tmp); } return o; @@ -1013,28 +1014,23 @@ r_object(RFILE *p) break; case TYPE_NONE: - Py_INCREF(Py_None); - retval = Py_None; + retval = Py_NewRef(Py_None); break; case TYPE_STOPITER: - Py_INCREF(PyExc_StopIteration); - retval = PyExc_StopIteration; + retval = Py_NewRef(PyExc_StopIteration); break; case TYPE_ELLIPSIS: - Py_INCREF(Py_Ellipsis); - retval = Py_Ellipsis; + retval = Py_NewRef(Py_Ellipsis); break; case TYPE_FALSE: - Py_INCREF(Py_False); - retval = Py_False; + retval = Py_NewRef(Py_False); break; case TYPE_TRUE: - Py_INCREF(Py_True); - retval = Py_True; + retval = Py_NewRef(Py_True); 
break; case TYPE_INT: @@ -1221,8 +1217,7 @@ r_object(RFILE *p) if (!PyErr_Occurred()) PyErr_SetString(PyExc_TypeError, "NULL object in marshal data for tuple"); - Py_DECREF(v); - v = NULL; + Py_SETREF(v, NULL); break; } PyTuple_SET_ITEM(v, i, v2); @@ -1248,8 +1243,7 @@ r_object(RFILE *p) if (!PyErr_Occurred()) PyErr_SetString(PyExc_TypeError, "NULL object in marshal data for list"); - Py_DECREF(v); - v = NULL; + Py_SETREF(v, NULL); break; } PyList_SET_ITEM(v, i, v2); @@ -1281,8 +1275,7 @@ r_object(RFILE *p) Py_DECREF(val); } if (PyErr_Occurred()) { - Py_DECREF(v); - v = NULL; + Py_SETREF(v, NULL); } retval = v; break; @@ -1326,8 +1319,7 @@ r_object(RFILE *p) if (!PyErr_Occurred()) PyErr_SetString(PyExc_TypeError, "NULL object in marshal data for set"); - Py_DECREF(v); - v = NULL; + Py_SETREF(v, NULL); break; } if (PySet_Add(v, v2) == -1) { @@ -1484,8 +1476,7 @@ r_object(RFILE *p) PyErr_SetString(PyExc_ValueError, "bad marshal data (invalid reference)"); break; } - Py_INCREF(v); - retval = v; + retval = Py_NewRef(v); break; default: diff --git a/Python/modsupport.c b/Python/modsupport.c index 8655daa1fc5e0e..b9a10dc157e7e5 100644 --- a/Python/modsupport.c +++ b/Python/modsupport.c @@ -10,9 +10,6 @@ typedef double va_double; static PyObject *va_build_value(const char *, va_list, int); static PyObject **va_build_stack(PyObject **small_stack, Py_ssize_t small_stack_len, const char *, va_list, int, Py_ssize_t*); -/* Package context -- the full module name for package imports */ -const char *_Py_PackageContext = NULL; - int _Py_convert_optional_to_ssize_t(PyObject *obj, void *result) @@ -359,8 +356,7 @@ do_mkvalue(const char **p_format, va_list *p_va, int flags) else n = -1; if (u == NULL) { - v = Py_None; - Py_INCREF(v); + v = Py_NewRef(Py_None); } else { if (n < 0) @@ -410,8 +406,7 @@ do_mkvalue(const char **p_format, va_list *p_va, int flags) else n = -1; if (str == NULL) { - v = Py_None; - Py_INCREF(v); + v = Py_NewRef(Py_None); } else { if (n < 0) { @@ -446,8 +441,7 @@ do_mkvalue(const char **p_format, va_list *p_va, int flags) else n = -1; if (str == NULL) { - v = Py_None; - Py_INCREF(v); + v = Py_NewRef(Py_None); } else { if (n < 0) { diff --git a/Python/opcode_targets.h b/Python/opcode_targets.h index 53b2770607b40e..b277fc2a20c18c 100644 --- a/Python/opcode_targets.h +++ b/Python/opcode_targets.h @@ -2,7 +2,7 @@ static void *opcode_targets[256] = { &&TARGET_CACHE, &&TARGET_POP_TOP, &&TARGET_PUSH_NULL, - &&TARGET_BINARY_OP_ADAPTIVE, + &&TARGET_INTERPRETER_EXIT, &&TARGET_END_FOR, &&TARGET_BINARY_OP_ADD_FLOAT, &&TARGET_BINARY_OP_ADD_INT, @@ -15,28 +15,30 @@ static void *opcode_targets[256] = { &&TARGET_BINARY_OP_MULTIPLY_FLOAT, &&TARGET_BINARY_OP_MULTIPLY_INT, &&TARGET_UNARY_INVERT, + &&TARGET_UNARY_POSITIVE_R, + &&TARGET_UNARY_NEGATIVE_R, + &&TARGET_UNARY_NOT_R, + &&TARGET_UNARY_INVERT_R, &&TARGET_BINARY_OP_SUBTRACT_FLOAT, &&TARGET_BINARY_OP_SUBTRACT_INT, - &&TARGET_BINARY_SUBSCR_ADAPTIVE, &&TARGET_BINARY_SUBSCR_DICT, &&TARGET_BINARY_SUBSCR_GETITEM, &&TARGET_BINARY_SUBSCR_LIST_INT, - &&TARGET_BINARY_SUBSCR_TUPLE_INT, - &&TARGET_CALL_ADAPTIVE, - &&TARGET_CALL_PY_EXACT_ARGS, &&TARGET_BINARY_SUBSCR, &&TARGET_BINARY_SLICE, &&TARGET_STORE_SLICE, - &&TARGET_CALL_PY_WITH_DEFAULTS, - &&TARGET_CALL_BOUND_METHOD_EXACT_ARGS, + &&TARGET_BINARY_SUBSCR_TUPLE_INT, + &&TARGET_CALL_PY_EXACT_ARGS, &&TARGET_GET_LEN, &&TARGET_MATCH_MAPPING, &&TARGET_MATCH_SEQUENCE, &&TARGET_MATCH_KEYS, - &&TARGET_CALL_BUILTIN_CLASS, + &&TARGET_CALL_PY_WITH_DEFAULTS, &&TARGET_PUSH_EXC_INFO, &&TARGET_CHECK_EXC_MATCH, 
&&TARGET_CHECK_EG_MATCH, + &&TARGET_CALL_BOUND_METHOD_EXACT_ARGS, + &&TARGET_CALL_BUILTIN_CLASS, &&TARGET_CALL_BUILTIN_FAST_WITH_KEYWORDS, &&TARGET_CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS, &&TARGET_CALL_NO_KW_BUILTIN_FAST, @@ -46,8 +48,6 @@ static void *opcode_targets[256] = { &&TARGET_CALL_NO_KW_LIST_APPEND, &&TARGET_CALL_NO_KW_METHOD_DESCRIPTOR_FAST, &&TARGET_CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS, - &&TARGET_CALL_NO_KW_METHOD_DESCRIPTOR_O, - &&TARGET_CALL_NO_KW_STR_1, &&TARGET_WITH_EXCEPT_START, &&TARGET_GET_AITER, &&TARGET_GET_ANEXT, @@ -55,24 +55,24 @@ static void *opcode_targets[256] = { &&TARGET_BEFORE_WITH, &&TARGET_END_ASYNC_FOR, &&TARGET_CLEANUP_THROW, + &&TARGET_CALL_NO_KW_METHOD_DESCRIPTOR_O, + &&TARGET_CALL_NO_KW_STR_1, &&TARGET_CALL_NO_KW_TUPLE_1, &&TARGET_CALL_NO_KW_TYPE_1, - &&TARGET_COMPARE_OP_ADAPTIVE, - &&TARGET_COMPARE_OP_FLOAT_JUMP, &&TARGET_STORE_SUBSCR, &&TARGET_DELETE_SUBSCR, + &&TARGET_COMPARE_OP_FLOAT_JUMP, + &&TARGET_STOPITERATION_ERROR, &&TARGET_COMPARE_OP_INT_JUMP, &&TARGET_COMPARE_OP_STR_JUMP, - &&TARGET_EXTENDED_ARG_QUICK, - &&TARGET_FOR_ITER_ADAPTIVE, &&TARGET_FOR_ITER_LIST, - &&TARGET_FOR_ITER_RANGE, + &&TARGET_FOR_ITER_TUPLE, &&TARGET_GET_ITER, &&TARGET_GET_YIELD_FROM_ITER, &&TARGET_PRINT_EXPR, &&TARGET_LOAD_BUILD_CLASS, - &&TARGET_JUMP_BACKWARD_QUICK, - &&TARGET_LOAD_ATTR_ADAPTIVE, + &&TARGET_FOR_ITER_RANGE, + &&TARGET_FOR_ITER_GEN, &&TARGET_LOAD_ASSERTION_ERROR, &&TARGET_RETURN_GENERATOR, &&TARGET_LOAD_ATTR_CLASS, @@ -152,32 +152,29 @@ static void *opcode_targets[256] = { &&TARGET_YIELD_VALUE, &&TARGET_RESUME, &&TARGET_MATCH_CLASS, - &&TARGET_LOAD_CONST__LOAD_FAST, - &&TARGET_LOAD_FAST__LOAD_CONST, + &&TARGET_LOAD_FAST_R, + &&TARGET_STORE_FAST_R, &&TARGET_FORMAT_VALUE, &&TARGET_BUILD_CONST_KEY_MAP, &&TARGET_BUILD_STRING, + &&TARGET_LOAD_CONST__LOAD_FAST, + &&TARGET_LOAD_FAST__LOAD_CONST, &&TARGET_LOAD_FAST__LOAD_FAST, - &&TARGET_LOAD_GLOBAL_ADAPTIVE, &&TARGET_LOAD_GLOBAL_BUILTIN, - &&TARGET_LOAD_GLOBAL_MODULE, &&TARGET_LIST_EXTEND, &&TARGET_SET_UPDATE, &&TARGET_DICT_MERGE, &&TARGET_DICT_UPDATE, - &&TARGET_RESUME_QUICK, - &&TARGET_STORE_ATTR_ADAPTIVE, + &&TARGET_LOAD_GLOBAL_MODULE, &&TARGET_STORE_ATTR_INSTANCE_VALUE, &&TARGET_STORE_ATTR_SLOT, &&TARGET_STORE_ATTR_WITH_HINT, + &&TARGET_STORE_FAST__LOAD_FAST, &&TARGET_CALL, &&TARGET_KW_NAMES, - &&TARGET_STORE_FAST__LOAD_FAST, &&TARGET_STORE_FAST__STORE_FAST, - &&TARGET_STORE_SUBSCR_ADAPTIVE, &&TARGET_STORE_SUBSCR_DICT, &&TARGET_STORE_SUBSCR_LIST_INT, - &&TARGET_UNPACK_SEQUENCE_ADAPTIVE, &&TARGET_UNPACK_SEQUENCE_LIST, &&TARGET_UNPACK_SEQUENCE_TUPLE, &&TARGET_UNPACK_SEQUENCE_TWO_TUPLE, @@ -254,5 +251,8 @@ static void *opcode_targets[256] = { &&_unknown_opcode, &&_unknown_opcode, &&_unknown_opcode, + &&_unknown_opcode, + &&_unknown_opcode, + &&_unknown_opcode, &&TARGET_DO_TRACING }; diff --git a/Python/pathconfig.c b/Python/pathconfig.c index 69b7e10a3b0288..be0f97c4b204a9 100644 --- a/Python/pathconfig.c +++ b/Python/pathconfig.c @@ -261,6 +261,8 @@ Py_SetPythonHome(const wchar_t *home) _PyMem_SetDefaultAllocator(PYMEM_DOMAIN_RAW, &old_alloc); PyMem_RawFree(_Py_path_config.home); + _Py_path_config.home = NULL; + if (has_value) { _Py_path_config.home = _PyMem_RawWcsdup(home); } @@ -282,6 +284,8 @@ Py_SetProgramName(const wchar_t *program_name) _PyMem_SetDefaultAllocator(PYMEM_DOMAIN_RAW, &old_alloc); PyMem_RawFree(_Py_path_config.program_name); + _Py_path_config.program_name = NULL; + if (has_value) { _Py_path_config.program_name = _PyMem_RawWcsdup(program_name); } @@ -302,6 +306,8 @@ 
_Py_SetProgramFullPath(const wchar_t *program_full_path) _PyMem_SetDefaultAllocator(PYMEM_DOMAIN_RAW, &old_alloc); PyMem_RawFree(_Py_path_config.program_full_path); + _Py_path_config.program_full_path = NULL; + if (has_value) { _Py_path_config.program_full_path = _PyMem_RawWcsdup(program_full_path); } diff --git a/Python/perf_trampoline.c b/Python/perf_trampoline.c index 161e0ef74cf1da..1957ab82c33951 100644 --- a/Python/perf_trampoline.c +++ b/Python/perf_trampoline.c @@ -134,11 +134,6 @@ any DWARF information available for them). #include "pycore_frame.h" #include "pycore_interp.h" -typedef enum { - PERF_STATUS_FAILED = -1, // Perf trampoline is in an invalid state - PERF_STATUS_NO_INIT = 0, // Perf trampoline is not initialized - PERF_STATUS_OK = 1, // Perf trampoline is ready to be executed -} perf_status_t; #ifdef PY_HAVE_PERF_TRAMPOLINE @@ -190,24 +185,13 @@ struct code_arena_st { }; typedef struct code_arena_st code_arena_t; - -struct trampoline_api_st { - void* (*init_state)(void); - void (*write_state)(void* state, const void *code_addr, - unsigned int code_size, PyCodeObject* code); - int (*free_state)(void* state); - void *state; -}; - typedef struct trampoline_api_st trampoline_api_t; - -static perf_status_t perf_status = PERF_STATUS_NO_INIT; -static Py_ssize_t extra_code_index = -1; -static code_arena_t *code_arena; -static trampoline_api_t trampoline_api; - -static FILE *perf_map_file; +#define perf_status _PyRuntime.ceval.perf.status +#define extra_code_index _PyRuntime.ceval.perf.extra_code_index +#define perf_code_arena _PyRuntime.ceval.perf.code_arena +#define trampoline_api _PyRuntime.ceval.perf.trampoline_api +#define perf_map_file _PyRuntime.ceval.perf.map_file static void * perf_map_get_file(void) @@ -344,17 +328,17 @@ new_code_arena(void) new_arena->size = mem_size; new_arena->size_left = mem_size; new_arena->code_size = code_size; - new_arena->prev = code_arena; - code_arena = new_arena; + new_arena->prev = perf_code_arena; + perf_code_arena = new_arena; return 0; } static void free_code_arenas(void) { - code_arena_t *cur = code_arena; + code_arena_t *cur = perf_code_arena; code_arena_t *prev; - code_arena = NULL; // invalid static pointer + perf_code_arena = NULL; // invalid static pointer while (cur) { munmap(cur->start_addr, cur->size); prev = cur->prev; @@ -375,14 +359,14 @@ code_arena_new_code(code_arena_t *code_arena) static inline py_trampoline compile_trampoline(void) { - if ((code_arena == NULL) || - (code_arena->size_left <= code_arena->code_size)) { + if ((perf_code_arena == NULL) || + (perf_code_arena->size_left <= perf_code_arena->code_size)) { if (new_code_arena() < 0) { return NULL; } } - assert(code_arena->size_left <= code_arena->size); - return code_arena_new_code(code_arena); + assert(perf_code_arena->size_left <= perf_code_arena->size); + return code_arena_new_code(perf_code_arena); } static PyObject * @@ -405,7 +389,7 @@ py_trampoline_evaluator(PyThreadState *ts, _PyInterpreterFrame *frame, goto default_eval; } trampoline_api.write_state(trampoline_api.state, new_trampoline, - code_arena->code_size, co); + perf_code_arena->code_size, co); _PyCode_SetExtra((PyObject *)co, extra_code_index, (void *)new_trampoline); f = new_trampoline; diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index e64849296c57f7..cfed246a919b3b 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -10,6 +10,7 @@ #include "pycore_fileutils.h" // _Py_ResetForceASCII() #include "pycore_floatobject.h" // _PyFloat_InitTypes() #include "pycore_genobject.h" // 
_PyAsyncGen_Fini() +#include "pycore_global_objects_fini_generated.h" // "_PyStaticObjects_CheckRefcnt() #include "pycore_import.h" // _PyImport_BootstrapImp() #include "pycore_initconfig.h" // _PyStatus_OK() #include "pycore_list.h" // _PyList_Fini() @@ -28,6 +29,7 @@ #include "pycore_tuple.h" // _PyTuple_InitTypes() #include "pycore_typeobject.h" // _PyTypes_InitTypes() #include "pycore_unicodeobject.h" // _PyUnicode_InitTypes() +#include "opcode.h" extern void _PyIO_Fini(void); @@ -52,7 +54,6 @@ extern void _PyIO_Fini(void); #ifdef MS_WINDOWS # undef BYTE -# include "windows.h" extern PyTypeObject PyWindowsConsoleIO_Type; # define PyWindowsConsoleIO_Check(op) \ @@ -75,13 +76,15 @@ static PyStatus init_sys_streams(PyThreadState *tstate); static void wait_for_thread_shutdown(PyThreadState *tstate); static void call_ll_exitfuncs(_PyRuntimeState *runtime); -int _Py_UnhandledKeyboardInterrupt = 0; - /* The following places the `_PyRuntime` structure in a location that can be * found without any external information. This is meant to ease access to the * interpreter state for various runtime debugging tools, but is *not* an * officially supported feature */ +/* Suppress deprecation warning for PyBytesObject.ob_shash */ +_Py_COMP_DIAG_PUSH +_Py_COMP_DIAG_IGNORE_DEPR_DECLS + #if defined(MS_WINDOWS) #pragma section("PyRuntime", read, write) @@ -95,14 +98,11 @@ __attribute__(( #endif -/* Suppress deprecation warning for PyBytesObject.ob_shash */ -_Py_COMP_DIAG_PUSH -_Py_COMP_DIAG_IGNORE_DEPR_DECLS _PyRuntimeState _PyRuntime #if defined(__linux__) && (defined(__GNUC__) || defined(__clang__)) __attribute__ ((section (".PyRuntime"))) #endif -= _PyRuntimeState_INIT; += _PyRuntimeState_INIT(_PyRuntime); _Py_COMP_DIAG_POP static int runtime_initialized = 0; @@ -598,11 +598,23 @@ pycore_init_runtime(_PyRuntimeState *runtime, */ _PyRuntimeState_SetFinalizing(runtime, NULL); + _Py_InitVersion(); + status = _Py_HashRandomization_Init(config); if (_PyStatus_EXCEPTION(status)) { return status; } + status = _PyTime_Init(); + if (_PyStatus_EXCEPTION(status)) { + return status; + } + + status = _PyImport_Init(); + if (_PyStatus_EXCEPTION(status)) { + return status; + } + status = _PyInterpreterState_Enable(runtime); if (_PyStatus_EXCEPTION(status)) { return status; @@ -778,6 +790,21 @@ pycore_init_types(PyInterpreterState *interp) return _PyStatus_OK(); } +static const uint8_t INTERPRETER_TRAMPOLINE_INSTRUCTIONS[] = { + /* Put a NOP at the start, so that the IP points into + * the code, rather than before it */ + NOP, 0, 0, 0, + INTERPRETER_EXIT, 0, 0, 0, + /* RESUME at end makes sure that the frame appears incomplete */ + RESUME, 0, 0, 0, +}; + +static const _PyShimCodeDef INTERPRETER_TRAMPOLINE_CODEDEF = { + INTERPRETER_TRAMPOLINE_INSTRUCTIONS, + sizeof(INTERPRETER_TRAMPOLINE_INSTRUCTIONS), + 1, + "<interpreter trampoline>" +}; static PyStatus pycore_init_builtins(PyThreadState *tstate) @@ -797,8 +824,7 @@ pycore_init_builtins(PyThreadState *tstate) if (builtins_dict == NULL) { goto error; } - Py_INCREF(builtins_dict); - interp->builtins = builtins_dict; + interp->builtins = Py_NewRef(builtins_dict); PyObject *isinstance = PyDict_GetItem(builtins_dict, &_Py_ID(isinstance)); assert(isinstance); @@ -812,7 +838,10 @@ pycore_init_builtins(PyThreadState *tstate) PyObject *object__getattribute__ = _PyType_Lookup(&PyBaseObject_Type, &_Py_ID(__getattribute__)); assert(object__getattribute__); interp->callable_cache.object__getattribute__ = object__getattribute__; - + interp->interpreter_trampoline = 
_Py_MakeShimCode(&INTERPRETER_TRAMPOLINE_CODEDEF); + if (interp->interpreter_trampoline == NULL) { + return _PyStatus_ERR("failed to create interpreter trampoline."); + } if (_PyBuiltins_AddExceptions(bimod) < 0) { return _PyStatus_ERR("failed to add exceptions to builtins"); } @@ -1725,7 +1754,7 @@ finalize_interp_types(PyInterpreterState *interp) _PyUnicode_Fini(interp); _PyFloat_Fini(interp); #ifdef Py_DEBUG - _PyStaticObjects_CheckRefcnt(); + _PyStaticObjects_CheckRefcnt(interp); #endif } @@ -2288,8 +2317,7 @@ create_stdio(const PyConfig *config, PyObject* io, goto error; } else { - raw = buf; - Py_INCREF(raw); + raw = Py_NewRef(buf); } #ifdef MS_WINDOWS @@ -2551,8 +2579,7 @@ _Py_FatalError_PrintExc(PyThreadState *tstate) _PyErr_NormalizeException(tstate, &exception, &v, &tb); if (tb == NULL) { - tb = Py_None; - Py_INCREF(tb); + tb = Py_NewRef(Py_None); } PyException_SetTraceback(v, tb); if (exception == NULL) { diff --git a/Python/pystate.c b/Python/pystate.c index dd6d6e92eca89a..f52fc38b358689 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -51,8 +51,11 @@ static void _PyThreadState_Delete(PyThreadState *tstate, int check_current); _Py_COMP_DIAG_PUSH _Py_COMP_DIAG_IGNORE_DEPR_DECLS /* We use "initial" if the runtime gets re-used - (e.g. Py_Finalize() followed by Py_Initialize(). */ -static const _PyRuntimeState initial = _PyRuntimeState_INIT; + (e.g. Py_Finalize() followed by Py_Initialize(). + Note that we initialize "initial" relative to _PyRuntime, + to ensure pre-initialized pointers point to the active + runtime state (and not "initial"). */ +static const _PyRuntimeState initial = _PyRuntimeState_INIT(_PyRuntime); _Py_COMP_DIAG_POP static int @@ -134,8 +137,8 @@ init_runtime(_PyRuntimeState *runtime, // Set it to the ID of the main thread of the main interpreter. runtime->main_thread = PyThread_get_thread_ident(); - runtime->unicode_ids.next_index = unicode_next_index; - runtime->unicode_ids.lock = unicode_ids_mutex; + runtime->unicode_state.ids.next_index = unicode_next_index; + runtime->unicode_state.ids.lock = unicode_ids_mutex; runtime->_initialized = 1; } @@ -151,7 +154,7 @@ _PyRuntimeState_Init(_PyRuntimeState *runtime) _Py_AuditHookEntry *audit_hook_head = runtime->audit_hook_head; // bpo-42882: Preserve next_index value if Py_Initialize()/Py_Finalize() // is called multiple times. 
- Py_ssize_t unicode_next_index = runtime->unicode_ids.next_index; + Py_ssize_t unicode_next_index = runtime->unicode_state.ids.next_index; PyThread_type_lock lock1, lock2, lock3, lock4; if (alloc_for_runtime(&lock1, &lock2, &lock3, &lock4) != 0) { @@ -183,7 +186,7 @@ _PyRuntimeState_Fini(_PyRuntimeState *runtime) FREE_LOCK(runtime->interpreters.mutex); FREE_LOCK(runtime->xidregistry.mutex); - FREE_LOCK(runtime->unicode_ids.lock); + FREE_LOCK(runtime->unicode_state.ids.lock); FREE_LOCK(runtime->getargs.mutex); #undef FREE_LOCK @@ -206,7 +209,7 @@ _PyRuntimeState_ReInitThreads(_PyRuntimeState *runtime) int reinit_interp = _PyThread_at_fork_reinit(&runtime->interpreters.mutex); int reinit_xidregistry = _PyThread_at_fork_reinit(&runtime->xidregistry.mutex); - int reinit_unicode_ids = _PyThread_at_fork_reinit(&runtime->unicode_ids.lock); + int reinit_unicode_ids = _PyThread_at_fork_reinit(&runtime->unicode_state.ids.lock); int reinit_getargs = _PyThread_at_fork_reinit(&runtime->getargs.mutex); PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &old_alloc); @@ -272,7 +275,9 @@ alloc_interpreter(void) static void free_interpreter(PyInterpreterState *interp) { - if (!interp->_static) { + // The main interpreter is statically allocated so + // should not be freed. + if (interp != &_PyRuntime._main_interpreter) { PyMem_RawFree(interp); } } @@ -450,11 +455,26 @@ interpreter_clear(PyInterpreterState *interp, PyThreadState *tstate) PyDict_Clear(interp->builtins); Py_CLEAR(interp->sysdict); Py_CLEAR(interp->builtins); + Py_CLEAR(interp->interpreter_trampoline); for (int i=0; i < DICT_MAX_WATCHERS; i++) { - interp->dict_watchers[i] = NULL; + interp->dict_state.watchers[i] = NULL; } + for (int i=0; i < TYPE_MAX_WATCHERS; i++) { + interp->type_watchers[i] = NULL; + } + + for (int i=0; i < FUNC_MAX_WATCHERS; i++) { + interp->func_watchers[i] = NULL; + } + interp->active_func_watchers = 0; + + for (int i=0; i < CODE_MAX_WATCHERS; i++) { + interp->code_watchers[i] = NULL; + } + interp->active_code_watchers = 0; + // XXX Once we have one allocator per interpreter (i.e. // per-interpreter GC) we must ensure that all of the interpreter's // objects have been cleaned up at the point. @@ -754,7 +774,9 @@ alloc_threadstate(void) static void free_threadstate(PyThreadState *tstate) { - if (!tstate->_static) { + // The initial thread state of the interpreter is allocated + // as part of the interpreter state so should not be freed. 
+ if (tstate != &tstate->interp->_initial_thread) { PyMem_RawFree(tstate); } } @@ -865,7 +887,9 @@ PyThreadState * PyThreadState_New(PyInterpreterState *interp) { PyThreadState *tstate = new_threadstate(interp); - _PyThreadState_SetCurrent(tstate); + if (tstate) { + _PyThreadState_SetCurrent(tstate); + } return tstate; } @@ -936,9 +960,8 @@ _PyState_AddModule(PyThreadState *tstate, PyObject* module, PyModuleDef* def) } } - Py_INCREF(module); return PyList_SetItem(interp->modules_by_index, - def->m_base.m_index, module); + def->m_base.m_index, Py_NewRef(module)); } int @@ -986,8 +1009,7 @@ PyState_RemoveModule(PyModuleDef* def) Py_FatalError("Module index out of bounds."); } - Py_INCREF(Py_None); - return PyList_SetItem(interp->modules_by_index, index, Py_None); + return PyList_SetItem(interp->modules_by_index, index, Py_NewRef(Py_None)); } // Used by finalize_modules() @@ -1291,8 +1313,7 @@ PyThreadState_GetFrame(PyThreadState *tstate) if (frame == NULL) { PyErr_Clear(); } - Py_XINCREF(frame); - return frame; + return (PyFrameObject*)Py_XNewRef(frame); } @@ -1338,8 +1359,7 @@ PyThreadState_SetAsyncExc(unsigned long id, PyObject *exc) * the decref. */ PyObject *old_exc = tstate->async_exc; - Py_XINCREF(exc); - tstate->async_exc = exc; + tstate->async_exc = Py_XNewRef(exc); HEAD_UNLOCK(runtime); Py_XDECREF(old_exc); @@ -1541,16 +1561,16 @@ _PyGILState_Init(_PyRuntimeState *runtime) PyStatus _PyGILState_SetTstate(PyThreadState *tstate) { + /* must init with valid states */ + assert(tstate != NULL); + assert(tstate->interp != NULL); + if (!_Py_IsMainInterpreter(tstate->interp)) { /* Currently, PyGILState is shared by all interpreters. The main * interpreter is responsible to initialize it. */ return _PyStatus_OK(); } - /* must init with valid states */ - assert(tstate != NULL); - assert(tstate->interp != NULL); - struct _gilstate_runtime_state *gilstate = &tstate->interp->runtime->gilstate; gilstate->autoInterpreterState = tstate->interp; @@ -1773,30 +1793,78 @@ PyGILState_Release(PyGILState_STATE oldstate) /* cross-interpreter data */ -crossinterpdatafunc _PyCrossInterpreterData_Lookup(PyObject *); +static inline void +_xidata_init(_PyCrossInterpreterData *data) +{ + // If the value is being reused + // then _xidata_clear() should have been called already. + assert(data->data == NULL); + assert(data->obj == NULL); + *data = (_PyCrossInterpreterData){0}; + data->interp = -1; +} -/* This is a separate func from _PyCrossInterpreterData_Lookup in order - to keep the registry code separate. */ -static crossinterpdatafunc -_lookup_getdata(PyObject *obj) +static inline void +_xidata_clear(_PyCrossInterpreterData *data) { - crossinterpdatafunc getdata = _PyCrossInterpreterData_Lookup(obj); - if (getdata == NULL && PyErr_Occurred() == 0) - PyErr_Format(PyExc_ValueError, - "%S does not support cross-interpreter data", obj); - return getdata; + if (data->free != NULL) { + data->free(data->data); + } + data->data = NULL; + Py_CLEAR(data->obj); +} + +void +_PyCrossInterpreterData_Init(_PyCrossInterpreterData *data, + PyInterpreterState *interp, + void *shared, PyObject *obj, + xid_newobjectfunc new_object) +{ + assert(data != NULL); + assert(new_object != NULL); + _xidata_init(data); + data->data = shared; + if (obj != NULL) { + assert(interp != NULL); + // released in _PyCrossInterpreterData_Clear() + data->obj = Py_NewRef(obj); + } + // Ideally every object would know its owning interpreter. + // Until then, we have to rely on the caller to identify it + // (but we don't need it in all cases). 
+ data->interp = (interp != NULL) ? interp->id : -1; + data->new_object = new_object; } int -_PyObject_CheckCrossInterpreterData(PyObject *obj) -{ - crossinterpdatafunc getdata = _lookup_getdata(obj); - if (getdata == NULL) { +_PyCrossInterpreterData_InitWithSize(_PyCrossInterpreterData *data, + PyInterpreterState *interp, + const size_t size, PyObject *obj, + xid_newobjectfunc new_object) +{ + assert(size > 0); + // For now we always free the shared data in the same interpreter + // where it was allocated, so the interpreter is required. + assert(interp != NULL); + _PyCrossInterpreterData_Init(data, interp, NULL, obj, new_object); + data->data = PyMem_Malloc(size); + if (data->data == NULL) { return -1; } + data->free = PyMem_Free; return 0; } +void +_PyCrossInterpreterData_Clear(PyInterpreterState *interp, + _PyCrossInterpreterData *data) +{ + assert(data != NULL); + // This must be called in the owning interpreter. + assert(interp == NULL || data->interp == interp->id); + _xidata_clear(data); +} + static int _check_xidata(PyThreadState *tstate, _PyCrossInterpreterData *data) { @@ -1819,6 +1887,30 @@ _check_xidata(PyThreadState *tstate, _PyCrossInterpreterData *data) return 0; } +crossinterpdatafunc _PyCrossInterpreterData_Lookup(PyObject *); + +/* This is a separate func from _PyCrossInterpreterData_Lookup in order + to keep the registry code separate. */ +static crossinterpdatafunc +_lookup_getdata(PyObject *obj) +{ + crossinterpdatafunc getdata = _PyCrossInterpreterData_Lookup(obj); + if (getdata == NULL && PyErr_Occurred() == 0) + PyErr_Format(PyExc_ValueError, + "%S does not support cross-interpreter data", obj); + return getdata; +} + +int +_PyObject_CheckCrossInterpreterData(PyObject *obj) +{ + crossinterpdatafunc getdata = _lookup_getdata(obj); + if (getdata == NULL) { + return -1; + } + return 0; +} + int _PyObject_GetCrossInterpreterData(PyObject *obj, _PyCrossInterpreterData *data) { @@ -1831,7 +1923,7 @@ _PyObject_GetCrossInterpreterData(PyObject *obj, _PyCrossInterpreterData *data) // Reset data before re-populating. *data = (_PyCrossInterpreterData){0}; - data->free = PyMem_RawFree; // Set a default that may be overridden. + data->interp = -1; // Call the "getdata" func for the object. Py_INCREF(obj); @@ -1840,7 +1932,7 @@ _PyObject_GetCrossInterpreterData(PyObject *obj, _PyCrossInterpreterData *data) Py_DECREF(obj); return -1; } - int res = getdata(obj, data); + int res = getdata(tstate, obj, data); Py_DECREF(obj); if (res != 0) { return -1; @@ -1849,27 +1941,24 @@ _PyObject_GetCrossInterpreterData(PyObject *obj, _PyCrossInterpreterData *data) // Fill in the blanks and validate the result. data->interp = interp->id; if (_check_xidata(tstate, data) != 0) { - _PyCrossInterpreterData_Release(data); + (void)_PyCrossInterpreterData_Release(data); return -1; } return 0; } -static void -_release_xidata(void *arg) +PyObject * +_PyCrossInterpreterData_NewObject(_PyCrossInterpreterData *data) { - _PyCrossInterpreterData *data = (_PyCrossInterpreterData *)arg; - if (data->free != NULL) { - data->free(data->data); - } - Py_XDECREF(data->obj); + return data->new_object(data); } +typedef void (*releasefunc)(PyInterpreterState *, void *); + static void _call_in_interpreter(struct _gilstate_runtime_state *gilstate, - PyInterpreterState *interp, - void (*func)(void *), void *arg) + PyInterpreterState *interp, releasefunc func, void *arg) { /* We would use Py_AddPendingCall() if it weren't specific to the * main interpreter (see bpo-33608). 
In the meantime we take a @@ -1883,7 +1972,9 @@ _call_in_interpreter(struct _gilstate_runtime_state *gilstate, save_tstate = _PyThreadState_Swap(gilstate, tstate); } - func(arg); + // XXX Once the GIL is per-interpreter, this should be called with the + // calling interpreter's GIL released and the target interpreter's held. + func(interp, arg); // Switch back. if (save_tstate != NULL) { @@ -1891,33 +1982,30 @@ _call_in_interpreter(struct _gilstate_runtime_state *gilstate, } } -void +int _PyCrossInterpreterData_Release(_PyCrossInterpreterData *data) { - if (data->data == NULL && data->obj == NULL) { + if (data->free == NULL && data->obj == NULL) { // Nothing to release! - return; + data->data = NULL; + return 0; } // Switch to the original interpreter. PyInterpreterState *interp = _PyInterpreterState_LookUpID(data->interp); if (interp == NULL) { // The interpreter was already destroyed. - if (data->free != NULL) { - // XXX Someone leaked some memory... - } - return; + // This function shouldn't have been called. + // XXX Someone leaked some memory... + assert(PyErr_Occurred()); + return -1; } // "Release" the data and/or the object. struct _gilstate_runtime_state *gilstate = &_PyRuntime.gilstate; - _call_in_interpreter(gilstate, interp, _release_xidata, data); -} - -PyObject * -_PyCrossInterpreterData_NewObject(_PyCrossInterpreterData *data) -{ - return data->new_object(data); + _call_in_interpreter(gilstate, interp, + (releasefunc)_PyCrossInterpreterData_Clear, data); + return 0; } /* registry of {type -> crossinterpdatafunc} */ @@ -1927,21 +2015,73 @@ _PyCrossInterpreterData_NewObject(_PyCrossInterpreterData *data) crossinterpdatafunc. It would be simpler and more efficient. */ static int -_register_xidata(struct _xidregistry *xidregistry, PyTypeObject *cls, +_xidregistry_add_type(struct _xidregistry *xidregistry, PyTypeObject *cls, crossinterpdatafunc getdata) { // Note that we effectively replace already registered classes // rather than failing. struct _xidregitem *newhead = PyMem_RawMalloc(sizeof(struct _xidregitem)); - if (newhead == NULL) + if (newhead == NULL) { + return -1; + } + // XXX Assign a callback to clear the entry from the registry? + newhead->cls = PyWeakref_NewRef((PyObject *)cls, NULL); + if (newhead->cls == NULL) { + PyMem_RawFree(newhead); return -1; - newhead->cls = cls; + } newhead->getdata = getdata; + newhead->prev = NULL; newhead->next = xidregistry->head; + if (newhead->next != NULL) { + newhead->next->prev = newhead; + } xidregistry->head = newhead; return 0; } +static struct _xidregitem * +_xidregistry_remove_entry(struct _xidregistry *xidregistry, + struct _xidregitem *entry) +{ + struct _xidregitem *next = entry->next; + if (entry->prev != NULL) { + assert(entry->prev->next == entry); + entry->prev->next = next; + } + else { + assert(xidregistry->head == entry); + xidregistry->head = next; + } + if (next != NULL) { + next->prev = entry->prev; + } + Py_DECREF(entry->cls); + PyMem_RawFree(entry); + return next; +} + +static struct _xidregitem * +_xidregistry_find_type(struct _xidregistry *xidregistry, PyTypeObject *cls) +{ + struct _xidregitem *cur = xidregistry->head; + while (cur != NULL) { + PyObject *registered = PyWeakref_GetObject(cur->cls); + if (registered == Py_None) { + // The weakly ref'ed object was freed. 
+ cur = _xidregistry_remove_entry(xidregistry, cur); + } + else { + assert(PyType_Check(registered)); + if (registered == (PyObject *)cls) { + return cur; + } + cur = cur->next; + } + } + return NULL; +} + static void _register_builtins_for_crossinterpreter_data(struct _xidregistry *xidregistry); int @@ -1957,19 +2097,32 @@ _PyCrossInterpreterData_RegisterClass(PyTypeObject *cls, return -1; } - // Make sure the class isn't ever deallocated. - Py_INCREF((PyObject *)cls); - struct _xidregistry *xidregistry = &_PyRuntime.xidregistry ; PyThread_acquire_lock(xidregistry->mutex, WAIT_LOCK); if (xidregistry->head == NULL) { _register_builtins_for_crossinterpreter_data(xidregistry); } - int res = _register_xidata(xidregistry, cls, getdata); + int res = _xidregistry_add_type(xidregistry, cls, getdata); + PyThread_release_lock(xidregistry->mutex); + return res; +} + +int +_PyCrossInterpreterData_UnregisterClass(PyTypeObject *cls) +{ + int res = 0; + struct _xidregistry *xidregistry = &_PyRuntime.xidregistry ; + PyThread_acquire_lock(xidregistry->mutex, WAIT_LOCK); + struct _xidregitem *matched = _xidregistry_find_type(xidregistry, cls); + if (matched != NULL) { + (void)_xidregistry_remove_entry(xidregistry, matched); + res = 1; + } PyThread_release_lock(xidregistry->mutex); return res; } + /* Cross-interpreter objects are looked up by exact match on the class. We can reassess this policy when we move from a global registry to a tp_* slot. */ @@ -1979,22 +2132,15 @@ _PyCrossInterpreterData_Lookup(PyObject *obj) { struct _xidregistry *xidregistry = &_PyRuntime.xidregistry ; PyObject *cls = PyObject_Type(obj); - crossinterpdatafunc getdata = NULL; PyThread_acquire_lock(xidregistry->mutex, WAIT_LOCK); - struct _xidregitem *cur = xidregistry->head; - if (cur == NULL) { + if (xidregistry->head == NULL) { _register_builtins_for_crossinterpreter_data(xidregistry); - cur = xidregistry->head; - } - for(; cur != NULL; cur = cur->next) { - if (cur->cls == (PyTypeObject *)cls) { - getdata = cur->getdata; - break; - } } + struct _xidregitem *matched = _xidregistry_find_type(xidregistry, + (PyTypeObject *)cls); Py_DECREF(cls); PyThread_release_lock(xidregistry->mutex); - return getdata; + return matched != NULL ? matched->getdata : NULL; } /* cross-interpreter data for builtin types */ @@ -2012,17 +2158,21 @@ _new_bytes_object(_PyCrossInterpreterData *data) } static int -_bytes_shared(PyObject *obj, _PyCrossInterpreterData *data) +_bytes_shared(PyThreadState *tstate, PyObject *obj, + _PyCrossInterpreterData *data) { - struct _shared_bytes_data *shared = PyMem_NEW(struct _shared_bytes_data, 1); + if (_PyCrossInterpreterData_InitWithSize( + data, tstate->interp, sizeof(struct _shared_bytes_data), obj, + _new_bytes_object + ) < 0) + { + return -1; + } + struct _shared_bytes_data *shared = (struct _shared_bytes_data *)data->data; if (PyBytes_AsStringAndSize(obj, &shared->bytes, &shared->len) < 0) { + _PyCrossInterpreterData_Clear(tstate->interp, data); return -1; } - data->data = (void *)shared; - Py_INCREF(obj); - data->obj = obj; // Will be "released" (decref'ed) when data released. 
- data->new_object = _new_bytes_object; - data->free = PyMem_Free; return 0; } @@ -2040,17 +2190,20 @@ _new_str_object(_PyCrossInterpreterData *data) } static int -_str_shared(PyObject *obj, _PyCrossInterpreterData *data) +_str_shared(PyThreadState *tstate, PyObject *obj, + _PyCrossInterpreterData *data) { - struct _shared_str_data *shared = PyMem_NEW(struct _shared_str_data, 1); + if (_PyCrossInterpreterData_InitWithSize( + data, tstate->interp, sizeof(struct _shared_str_data), obj, + _new_str_object + ) < 0) + { + return -1; + } + struct _shared_str_data *shared = (struct _shared_str_data *)data->data; shared->kind = PyUnicode_KIND(obj); shared->buffer = PyUnicode_DATA(obj); shared->len = PyUnicode_GET_LENGTH(obj); - data->data = (void *)shared; - Py_INCREF(obj); - data->obj = obj; // Will be "released" (decref'ed) when data released. - data->new_object = _new_str_object; - data->free = PyMem_Free; return 0; } @@ -2061,7 +2214,8 @@ _new_long_object(_PyCrossInterpreterData *data) } static int -_long_shared(PyObject *obj, _PyCrossInterpreterData *data) +_long_shared(PyThreadState *tstate, PyObject *obj, + _PyCrossInterpreterData *data) { /* Note that this means the size of shareable ints is bounded by * sys.maxsize. Hence on 32-bit architectures that is half the @@ -2074,10 +2228,9 @@ _long_shared(PyObject *obj, _PyCrossInterpreterData *data) } return -1; } - data->data = (void *)value; - data->obj = NULL; - data->new_object = _new_long_object; - data->free = NULL; + _PyCrossInterpreterData_Init(data, tstate->interp, (void *)value, NULL, + _new_long_object); + // data->obj and data->free remain NULL return 0; } @@ -2085,17 +2238,16 @@ static PyObject * _new_none_object(_PyCrossInterpreterData *data) { // XXX Singleton refcounts are problematic across interpreters... - Py_INCREF(Py_None); - return Py_None; + return Py_NewRef(Py_None); } static int -_none_shared(PyObject *obj, _PyCrossInterpreterData *data) +_none_shared(PyThreadState *tstate, PyObject *obj, + _PyCrossInterpreterData *data) { - data->data = NULL; - // data->obj remains NULL - data->new_object = _new_none_object; - data->free = NULL; // There is nothing to free. 
+ _PyCrossInterpreterData_Init(data, tstate->interp, NULL, NULL, + _new_none_object); + // data->data, data->obj and data->free remain NULL return 0; } @@ -2103,22 +2255,22 @@ static void _register_builtins_for_crossinterpreter_data(struct _xidregistry *xidregistry) { // None - if (_register_xidata(xidregistry, (PyTypeObject *)PyObject_Type(Py_None), _none_shared) != 0) { + if (_xidregistry_add_type(xidregistry, (PyTypeObject *)PyObject_Type(Py_None), _none_shared) != 0) { Py_FatalError("could not register None for cross-interpreter sharing"); } // int - if (_register_xidata(xidregistry, &PyLong_Type, _long_shared) != 0) { + if (_xidregistry_add_type(xidregistry, &PyLong_Type, _long_shared) != 0) { Py_FatalError("could not register int for cross-interpreter sharing"); } // bytes - if (_register_xidata(xidregistry, &PyBytes_Type, _bytes_shared) != 0) { + if (_xidregistry_add_type(xidregistry, &PyBytes_Type, _bytes_shared) != 0) { Py_FatalError("could not register bytes for cross-interpreter sharing"); } // str - if (_register_xidata(xidregistry, &PyUnicode_Type, _str_shared) != 0) { + if (_xidregistry_add_type(xidregistry, &PyUnicode_Type, _str_shared) != 0) { Py_FatalError("could not register str for cross-interpreter sharing"); } } diff --git a/Python/pythonrun.c b/Python/pythonrun.c index a0005b32fcf146..35292b6478a833 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -515,8 +515,7 @@ parse_syntax_error(PyObject *err, PyObject **message, PyObject **filename, if (v == Py_None) { Py_DECREF(v); _Py_DECLARE_STR(anon_string, "<string>"); - *filename = &_Py_STR(anon_string); - Py_INCREF(*filename); + *filename = Py_NewRef(&_Py_STR(anon_string)); } else { *filename = v; @@ -719,8 +718,7 @@ _Py_HandleSystemExit(int *exitcode_p) /* The error code should be in the `code' attribute. */ PyObject *code = PyObject_GetAttr(value, &_Py_ID(code)); if (code) { - Py_DECREF(value); - value = code; + Py_SETREF(value, code); if (value == Py_None) goto done; } @@ -786,8 +784,7 @@ _PyErr_PrintEx(PyThreadState *tstate, int set_sys_last_vars) _PyErr_NormalizeException(tstate, &exception, &v, &tb); if (tb == NULL) { - tb = Py_None; - Py_INCREF(tb); + tb = Py_NewRef(Py_None); } PyException_SetTraceback(v, tb); if (exception == NULL) { @@ -833,12 +830,10 @@ _PyErr_PrintEx(PyThreadState *tstate, int set_sys_last_vars) to be NULL. However PyErr_Display() can't tolerate NULLs, so just be safe. */ if (exception2 == NULL) { - exception2 = Py_None; - Py_INCREF(exception2); + exception2 = Py_NewRef(Py_None); } if (v2 == NULL) { - v2 = Py_None; - Py_INCREF(v2); + v2 = Py_NewRef(Py_None); } fflush(stdout); PySys_WriteStderr("Error in sys.excepthook:\n"); @@ -1691,7 +1686,8 @@ run_eval_code_obj(PyThreadState *tstate, PyCodeObject *co, PyObject *globals, Py * uncaught exception to trigger an unexplained signal exit from a future * Py_Main() based one. */ - _Py_UnhandledKeyboardInterrupt = 0; + // XXX Isn't this dealt with by the move to _PyRuntimeState? 
+ _PyRuntime.signals.unhandled_keyboard_interrupt = 0; /* Set globals['__builtins__'] if it doesn't exist */ if (globals != NULL && _PyDict_GetItemStringWithError(globals, "__builtins__") == NULL) { @@ -1705,7 +1701,7 @@ run_eval_code_obj(PyThreadState *tstate, PyCodeObject *co, PyObject *globals, Py v = PyEval_EvalCode((PyObject*)co, globals, locals); if (!v && _PyErr_Occurred(tstate) == PyExc_KeyboardInterrupt) { - _Py_UnhandledKeyboardInterrupt = 1; + _PyRuntime.signals.unhandled_keyboard_interrupt = 1; } return v; } diff --git a/Python/specialize.c b/Python/specialize.c index 6e2fa4e25fa47d..f50b491692c905 100644 --- a/Python/specialize.c +++ b/Python/specialize.c @@ -16,25 +16,9 @@ * ./adaptive.md */ -/* Map from opcode to adaptive opcode. - Values of zero are ignored. */ -uint8_t _PyOpcode_Adaptive[256] = { - [LOAD_ATTR] = LOAD_ATTR_ADAPTIVE, - [LOAD_GLOBAL] = LOAD_GLOBAL_ADAPTIVE, - [BINARY_SUBSCR] = BINARY_SUBSCR_ADAPTIVE, - [STORE_SUBSCR] = STORE_SUBSCR_ADAPTIVE, - [CALL] = CALL_ADAPTIVE, - [STORE_ATTR] = STORE_ATTR_ADAPTIVE, - [BINARY_OP] = BINARY_OP_ADAPTIVE, - [COMPARE_OP] = COMPARE_OP_ADAPTIVE, - [UNPACK_SEQUENCE] = UNPACK_SEQUENCE_ADAPTIVE, - [FOR_ITER] = FOR_ITER_ADAPTIVE, -}; - -Py_ssize_t _Py_QuickenedCount = 0; #ifdef Py_STATS PyStats _py_stats_struct = { 0 }; -PyStats *_py_stats = &_py_stats_struct; +PyStats *_py_stats = NULL; #define ADD_STAT_TO_DICT(res, field) \ do { \ @@ -144,7 +128,7 @@ print_spec_stats(FILE *out, OpcodeStats *stats) fprintf(out, "opcode[%d].specializable : 1\n", BINARY_SLICE); fprintf(out, "opcode[%d].specializable : 1\n", STORE_SLICE); for (int i = 0; i < 256; i++) { - if (_PyOpcode_Adaptive[i]) { + if (_PyOpcode_Caches[i]) { fprintf(out, "opcode[%d].specializable : 1\n", i); } PRINT_STAT(i, specialization.success); @@ -203,6 +187,11 @@ print_object_stats(FILE *out, ObjectStats *stats) fprintf(out, "Object materialize dict (new key): %" PRIu64 "\n", stats->dict_materialized_new_key); fprintf(out, "Object materialize dict (too big): %" PRIu64 "\n", stats->dict_materialized_too_big); fprintf(out, "Object materialize dict (str subclass): %" PRIu64 "\n", stats->dict_materialized_str_subclass); + fprintf(out, "Object method cache hits: %" PRIu64 "\n", stats->type_cache_hits); + fprintf(out, "Object method cache misses: %" PRIu64 "\n", stats->type_cache_misses); + fprintf(out, "Object method cache collisions: %" PRIu64 "\n", stats->type_cache_collisions); + fprintf(out, "Object method cache dunder hits: %" PRIu64 "\n", stats->type_cache_dunder_hits); + fprintf(out, "Object method cache dunder misses: %" PRIu64 "\n", stats->type_cache_dunder_misses); } static void @@ -221,9 +210,6 @@ _Py_StatsClear(void) void _Py_PrintSpecializationStats(int to_file) { - if (_py_stats == NULL) { - return; - } FILE *out = stderr; if (to_file) { /* Write to a file instead of stderr. */ @@ -254,7 +240,7 @@ _Py_PrintSpecializationStats(int to_file) else { fprintf(out, "Specialization stats:\n"); } - print_stats(out, _py_stats); + print_stats(out, &_py_stats_struct); if (out != stderr) { fclose(out); } @@ -276,82 +262,42 @@ do { \ #define SPECIALIZATION_FAIL(opcode, kind) ((void)0) #endif -// Insert adaptive instructions and superinstructions. This cannot fail. +// Initialize warmup counters and insert superinstructions. This cannot fail. 
void _PyCode_Quicken(PyCodeObject *code) { - _Py_QuickenedCount++; - int previous_opcode = -1; + int previous_opcode = 0; _Py_CODEUNIT *instructions = _PyCode_CODE(code); - for (int i = 0; i < Py_SIZE(code); i++) { - int opcode = _Py_OPCODE(instructions[i]); - uint8_t adaptive_opcode = _PyOpcode_Adaptive[opcode]; - if (adaptive_opcode) { - _Py_SET_OPCODE(instructions[i], adaptive_opcode); - // Make sure the adaptive counter is zero: - assert(instructions[i + 1] == 0); - previous_opcode = -1; - i += _PyOpcode_Caches[opcode]; - } - else { - assert(!_PyOpcode_Caches[opcode]); - switch (opcode) { - case EXTENDED_ARG: - _Py_SET_OPCODE(instructions[i], EXTENDED_ARG_QUICK); - break; - case JUMP_BACKWARD: - _Py_SET_OPCODE(instructions[i], JUMP_BACKWARD_QUICK); - break; - case RESUME: - _Py_SET_OPCODE(instructions[i], RESUME_QUICK); - break; - case LOAD_FAST: - switch(previous_opcode) { - case LOAD_FAST: - _Py_SET_OPCODE(instructions[i - 1], - LOAD_FAST__LOAD_FAST); - break; - case STORE_FAST: - _Py_SET_OPCODE(instructions[i - 1], - STORE_FAST__LOAD_FAST); - break; - case LOAD_CONST: - _Py_SET_OPCODE(instructions[i - 1], - LOAD_CONST__LOAD_FAST); - break; - } - break; - case STORE_FAST: - if (previous_opcode == STORE_FAST) { - _Py_SET_OPCODE(instructions[i - 1], - STORE_FAST__STORE_FAST); - } - break; - case LOAD_CONST: - if (previous_opcode == LOAD_FAST) { - _Py_SET_OPCODE(instructions[i - 1], - LOAD_FAST__LOAD_CONST); - } - break; - } - previous_opcode = opcode; + for (int i = 0; i < Py_SIZE(code); i += OPSIZE) { + int opcode = _PyOpcode_Deopt[_Py_OPCODE(instructions[i])]; + int caches = _PyOpcode_Caches[opcode]; + if (caches) { + instructions[i + OPSIZE].cache = adaptive_counter_warmup(); + previous_opcode = 0; + i += caches; + continue; + } + switch (previous_opcode << 8 | opcode) { + case LOAD_CONST << 8 | LOAD_FAST: + instructions[i - OPSIZE].opcode = LOAD_CONST__LOAD_FAST; + break; + case LOAD_FAST << 8 | LOAD_CONST: + instructions[i - OPSIZE].opcode = LOAD_FAST__LOAD_CONST; + break; + case LOAD_FAST << 8 | LOAD_FAST: + instructions[i - OPSIZE].opcode = LOAD_FAST__LOAD_FAST; + break; + case STORE_FAST << 8 | LOAD_FAST: + instructions[i - OPSIZE].opcode = STORE_FAST__LOAD_FAST; + break; + case STORE_FAST << 8 | STORE_FAST: + instructions[i - OPSIZE].opcode = STORE_FAST__STORE_FAST; + break; } + previous_opcode = opcode; } } -static inline int -miss_counter_start(void) { - /* Starting value for the counter. - * This value needs to be not too low, otherwise - * it would cause excessive de-optimization. - * Neither should it be too high, or that would delay - * de-optimization excessively when it is needed. - * A value around 50 seems to work, and we choose a - * prime number to avoid artifacts. 
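The new _PyCode_Quicken() above no longer rewrites instructions to *_ADAPTIVE variants; it only seeds the inline-cache warm-up counters and fuses adjacent instruction pairs into superinstructions by switching on previous_opcode << 8 | opcode. A minimal, self-contained illustration of that pairing trick follows; the opcode numbers are placeholders, not the values from opcode.h:

    #include <stdio.h>

    /* Placeholder opcode numbers, for demonstration only. */
    enum { LOAD_FAST = 1, LOAD_CONST = 2, STORE_FAST = 3 };

    /* Real opcodes fit in one byte, so (prev << 8 | cur) is a unique
     * key for every adjacent pair of instructions. */
    static int
    pair_key(int previous_opcode, int opcode)
    {
        return previous_opcode << 8 | opcode;
    }

    int
    main(void)
    {
        switch (pair_key(LOAD_FAST, LOAD_CONST)) {
            case LOAD_FAST << 8 | LOAD_CONST:
                puts("fuse into LOAD_FAST__LOAD_CONST");
                break;
            case STORE_FAST << 8 | STORE_FAST:
                puts("fuse into STORE_FAST__STORE_FAST");
                break;
            default:
                puts("no superinstruction for this pair");
        }
        return 0;
    }

In the real function the fused opcode is written back over the previous instruction (instructions[i - OPSIZE].opcode = ...), and any instruction that carries a cache gets its counter set to adaptive_counter_warmup() instead.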
- */ - return 53; -} - #define SIMPLE_FUNCTION 0 /* Common */ @@ -366,6 +312,7 @@ miss_counter_start(void) { #define SPEC_FAIL_NOT_PY_FUNCTION 7 +#define SPEC_FAIL_LOAD_GLOBAL_NON_DICT 17 #define SPEC_FAIL_LOAD_GLOBAL_NON_STRING_OR_SPLIT 18 /* Attributes */ @@ -391,6 +338,8 @@ miss_counter_start(void) { #define SPEC_FAIL_ATTR_INSTANCE_ATTRIBUTE 26 #define SPEC_FAIL_ATTR_METACLASS_ATTRIBUTE 27 #define SPEC_FAIL_ATTR_PROPERTY_NOT_PY_FUNCTION 28 +#define SPEC_FAIL_ATTR_NOT_IN_KEYS 29 +#define SPEC_FAIL_ATTR_NOT_IN_DICT 30 /* Binary subscr and store subscr */ @@ -507,41 +456,44 @@ static bool function_check_args(PyObject *o, int expected_argcount, int opcode); static uint32_t function_get_version(PyObject *o, int opcode); static int -specialize_module_load_attr(PyObject *owner, _Py_CODEUNIT *instr, - PyObject *name, int opcode, int opcode_module) -{ - _PyAttrCache *cache = (_PyAttrCache *)(instr + 1); +specialize_module_load_attr( + PyObject *owner, _Py_CODEUNIT *instr, PyObject *name +) { + _PyAttrCache *cache = (_PyAttrCache *)(instr + OPSIZE); PyModuleObject *m = (PyModuleObject *)owner; assert((owner->ob_type->tp_flags & Py_TPFLAGS_MANAGED_DICT) == 0); PyDictObject *dict = (PyDictObject *)m->md_dict; if (dict == NULL) { - SPECIALIZATION_FAIL(opcode, SPEC_FAIL_NO_DICT); + SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_NO_DICT); return -1; } if (dict->ma_keys->dk_kind != DICT_KEYS_UNICODE) { - SPECIALIZATION_FAIL(opcode, SPEC_FAIL_ATTR_NON_STRING_OR_SPLIT); + SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_NON_STRING_OR_SPLIT); return -1; } Py_ssize_t index = _PyDict_LookupIndex(dict, &_Py_ID(__getattr__)); assert(index != DKIX_ERROR); if (index != DKIX_EMPTY) { - SPECIALIZATION_FAIL(opcode, SPEC_FAIL_ATTR_MODULE_ATTR_NOT_FOUND); + SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_MODULE_ATTR_NOT_FOUND); return -1; } index = _PyDict_LookupIndex(dict, name); assert (index != DKIX_ERROR); if (index != (uint16_t)index) { - SPECIALIZATION_FAIL(opcode, SPEC_FAIL_OUT_OF_RANGE); + SPECIALIZATION_FAIL(LOAD_ATTR, + index == DKIX_EMPTY ? + SPEC_FAIL_ATTR_MODULE_ATTR_NOT_FOUND : + SPEC_FAIL_OUT_OF_RANGE); return -1; } uint32_t keys_version = _PyDictKeys_GetVersionForCurrentState(dict->ma_keys); if (keys_version == 0) { - SPECIALIZATION_FAIL(opcode, SPEC_FAIL_OUT_OF_VERSIONS); + SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OUT_OF_VERSIONS); return -1; } write_u32(cache->version, keys_version); cache->index = (uint16_t)index; - _Py_SET_OPCODE(*instr, opcode_module); + _py_set_opcode(instr, LOAD_ATTR_MODULE); return 0; } @@ -679,7 +631,7 @@ specialize_dict_access( SPECIALIZATION_FAIL(base_op, SPEC_FAIL_ATTR_NOT_MANAGED_DICT); return 0; } - _PyAttrCache *cache = (_PyAttrCache *)(instr + 1); + _PyAttrCache *cache = (_PyAttrCache *)(instr + OPSIZE); PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); if (_PyDictOrValues_IsValues(dorv)) { // Virtual dictionary @@ -688,12 +640,15 @@ specialize_dict_access( Py_ssize_t index = _PyDictKeys_StringLookup(keys, name); assert (index != DKIX_ERROR); if (index != (uint16_t)index) { - SPECIALIZATION_FAIL(base_op, SPEC_FAIL_OUT_OF_RANGE); + SPECIALIZATION_FAIL(base_op, + index == DKIX_EMPTY ? 
+ SPEC_FAIL_ATTR_NOT_IN_KEYS : + SPEC_FAIL_OUT_OF_RANGE); return 0; } write_u32(cache->version, type->tp_version_tag); cache->index = (uint16_t)index; - _Py_SET_OPCODE(*instr, values_op); + _py_set_opcode(instr, values_op); } else { PyDictObject *dict = (PyDictObject *)_PyDictOrValues_GetDict(dorv); @@ -705,12 +660,15 @@ specialize_dict_access( Py_ssize_t index = _PyDict_LookupIndex(dict, name); if (index != (uint16_t)index) { - SPECIALIZATION_FAIL(base_op, SPEC_FAIL_OUT_OF_RANGE); + SPECIALIZATION_FAIL(base_op, + index == DKIX_EMPTY ? + SPEC_FAIL_ATTR_NOT_IN_DICT : + SPEC_FAIL_OUT_OF_RANGE); return 0; } cache->index = (uint16_t)index; write_u32(cache->version, type->tp_version_tag); - _Py_SET_OPCODE(*instr, hint_op); + _py_set_opcode(instr, hint_op); } return 1; } @@ -719,32 +677,32 @@ static int specialize_attr_loadmethod(PyObject* owner, _Py_CODEUNIT* instr, PyOb PyObject* descr, DescriptorClassification kind); static int specialize_class_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* name); -int +void _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) { assert(_PyOpcode_Caches[LOAD_ATTR] == INLINE_CACHE_ENTRIES_LOAD_ATTR); - _PyAttrCache *cache = (_PyAttrCache *)(instr + 1); + _PyAttrCache *cache = (_PyAttrCache *)(instr + OPSIZE); + PyTypeObject *type = Py_TYPE(owner); + if (!_PyType_IsReady(type)) { + // We *might* not really need this check, but we inherited it from + // PyObject_GenericGetAttr and friends... and this way we still do the + // right thing if someone forgets to call PyType_Ready(type): + SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OTHER); + goto fail; + } if (PyModule_CheckExact(owner)) { - int err = specialize_module_load_attr(owner, instr, name, LOAD_ATTR, - LOAD_ATTR_MODULE); - if (err) { + if (specialize_module_load_attr(owner, instr, name)) + { goto fail; } goto success; } if (PyType_Check(owner)) { - int err = specialize_class_load_attr(owner, instr, name); - if (err) { + if (specialize_class_load_attr(owner, instr, name)) { goto fail; } goto success; } - PyTypeObject *type = Py_TYPE(owner); - if (type->tp_dict == NULL) { - if (PyType_Ready(type) < 0) { - return -1; - } - } PyObject *descr = NULL; DescriptorClassification kind = analyze_descriptor(type, name, &descr, 0); assert(descr != NULL || kind == ABSENT || kind == GETSET_OVERRIDDEN); @@ -760,12 +718,14 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) goto success; } } - SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_METHOD); + else { + SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_METHOD); + } goto fail; } case PROPERTY: { - _PyLoadMethodCache *lm_cache = (_PyLoadMethodCache *)(instr + 1); + _PyLoadMethodCache *lm_cache = (_PyLoadMethodCache *)(instr + OPSIZE); assert(Py_TYPE(descr) == &PyProperty_Type); PyObject *fget = ((_PyPropertyObject *)descr)->prop_get; if (fget == NULL) { @@ -788,7 +748,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) write_u32(lm_cache->type_version, type->tp_version_tag); /* borrowed */ write_obj(lm_cache->descr, fget); - _Py_SET_OPCODE(*instr, LOAD_ATTR_PROPERTY); + _py_set_opcode(instr, LOAD_ATTR_PROPERTY); goto success; } case OBJECT_SLOT: @@ -796,6 +756,10 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) PyMemberDescrObject *member = (PyMemberDescrObject *)descr; struct PyMemberDef *dmem = member->d_member; Py_ssize_t offset = dmem->offset; + if (!PyObject_TypeCheck(owner, member->d_common.d_type)) { + SPECIALIZATION_FAIL(LOAD_ATTR, 
SPEC_FAIL_EXPECTED_ERROR); + goto fail; + } if (dmem->flags & PY_AUDIT_READ) { SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_AUDITED_SLOT); goto fail; @@ -808,7 +772,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) assert(offset > 0); cache->index = (uint16_t)offset; write_u32(cache->version, type->tp_version_tag); - _Py_SET_OPCODE(*instr, LOAD_ATTR_SLOT); + _py_set_opcode(instr, LOAD_ATTR_SLOT); goto success; } case DUNDER_CLASS: @@ -817,7 +781,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) assert(offset == (uint16_t)offset); cache->index = (uint16_t)offset; write_u32(cache->version, type->tp_version_tag); - _Py_SET_OPCODE(*instr, LOAD_ATTR_SLOT); + _py_set_opcode(instr, LOAD_ATTR_SLOT); goto success; } case OTHER_SLOT: @@ -833,7 +797,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) { assert(type->tp_getattro == _Py_slot_tp_getattro); assert(Py_IS_TYPE(descr, &PyFunction_Type)); - _PyLoadMethodCache *lm_cache = (_PyLoadMethodCache *)(instr + 1); + _PyLoadMethodCache *lm_cache = (_PyLoadMethodCache *)(instr + OPSIZE); if (!function_check_args(descr, 2, LOAD_ATTR)) { goto fail; } @@ -845,7 +809,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) /* borrowed */ write_obj(lm_cache->descr, descr); write_u32(lm_cache->type_version, type->tp_version_tag); - _Py_SET_OPCODE(*instr, LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN); + _py_set_opcode(instr, LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN); goto success; } case BUILTIN_CLASSMETHOD: @@ -855,34 +819,36 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) case ABSENT: break; } - int err = specialize_dict_access( - owner, instr, type, kind, name, - LOAD_ATTR, LOAD_ATTR_INSTANCE_VALUE, LOAD_ATTR_WITH_HINT - ); - if (err < 0) { - return -1; - } - if (err) { + if (specialize_dict_access(owner, instr, type, kind, name, LOAD_ATTR, + LOAD_ATTR_INSTANCE_VALUE, LOAD_ATTR_WITH_HINT)) + { goto success; } fail: STAT_INC(LOAD_ATTR, failure); assert(!PyErr_Occurred()); + _py_set_opcode(instr, LOAD_ATTR); cache->counter = adaptive_counter_backoff(cache->counter); - return 0; + return; success: STAT_INC(LOAD_ATTR, success); assert(!PyErr_Occurred()); - cache->counter = miss_counter_start(); - return 0; + cache->counter = adaptive_counter_cooldown(); } -int +void _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) { assert(_PyOpcode_Caches[STORE_ATTR] == INLINE_CACHE_ENTRIES_STORE_ATTR); - _PyAttrCache *cache = (_PyAttrCache *)(instr + 1); + _PyAttrCache *cache = (_PyAttrCache *)(instr + OPSIZE); PyTypeObject *type = Py_TYPE(owner); + if (!_PyType_IsReady(type)) { + // We *might* not really need this check, but we inherited it from + // PyObject_GenericSetAttr and friends... 
and this way we still do the + // right thing if someone forgets to call PyType_Ready(type): + SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OTHER); + goto fail; + } if (PyModule_CheckExact(owner)) { SPECIALIZATION_FAIL(STORE_ATTR, SPEC_FAIL_OVERRIDDEN); goto fail; @@ -904,6 +870,10 @@ _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) PyMemberDescrObject *member = (PyMemberDescrObject *)descr; struct PyMemberDef *dmem = member->d_member; Py_ssize_t offset = dmem->offset; + if (!PyObject_TypeCheck(owner, member->d_common.d_type)) { + SPECIALIZATION_FAIL(STORE_ATTR, SPEC_FAIL_EXPECTED_ERROR); + goto fail; + } if (dmem->flags & READONLY) { SPECIALIZATION_FAIL(STORE_ATTR, SPEC_FAIL_ATTR_READ_ONLY); goto fail; @@ -916,7 +886,7 @@ _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) assert(offset > 0); cache->index = (uint16_t)offset; write_u32(cache->version, type->tp_version_tag); - _Py_SET_OPCODE(*instr, STORE_ATTR_SLOT); + _py_set_opcode(instr, STORE_ATTR_SLOT); goto success; } case DUNDER_CLASS: @@ -937,27 +907,21 @@ _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) case ABSENT: break; } - - int err = specialize_dict_access( - owner, instr, type, kind, name, - STORE_ATTR, STORE_ATTR_INSTANCE_VALUE, STORE_ATTR_WITH_HINT - ); - if (err < 0) { - return -1; - } - if (err) { + if (specialize_dict_access(owner, instr, type, kind, name, STORE_ATTR, + STORE_ATTR_INSTANCE_VALUE, STORE_ATTR_WITH_HINT)) + { goto success; } fail: STAT_INC(STORE_ATTR, failure); assert(!PyErr_Occurred()); + _py_set_opcode(instr, STORE_ATTR); cache->counter = adaptive_counter_backoff(cache->counter); - return 0; + return; success: STAT_INC(STORE_ATTR, success); assert(!PyErr_Occurred()); - cache->counter = miss_counter_start(); - return 0; + cache->counter = adaptive_counter_cooldown(); } @@ -1002,7 +966,7 @@ static int specialize_class_load_attr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) { - _PyLoadMethodCache *cache = (_PyLoadMethodCache *)(instr + 1); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)(instr + OPSIZE); if (!PyType_CheckExact(owner) || _PyType_Lookup(Py_TYPE(owner), name)) { SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_METACLASS_ATTRIBUTE); return -1; @@ -1015,7 +979,7 @@ specialize_class_load_attr(PyObject *owner, _Py_CODEUNIT *instr, case NON_DESCRIPTOR: write_u32(cache->type_version, ((PyTypeObject *)owner)->tp_version_tag); write_obj(cache->descr, descr); - _Py_SET_OPCODE(*instr, LOAD_ATTR_CLASS); + _py_set_opcode(instr, LOAD_ATTR_CLASS); return 0; #ifdef Py_STATS case ABSENT: @@ -1043,7 +1007,7 @@ static int specialize_attr_loadmethod(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name, PyObject *descr, DescriptorClassification kind) { - _PyLoadMethodCache *cache = (_PyLoadMethodCache *)(instr + 1); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)(instr + OPSIZE); PyTypeObject *owner_cls = Py_TYPE(owner); assert(kind == METHOD && descr != NULL); @@ -1097,21 +1061,21 @@ PyObject *descr, DescriptorClassification kind) } switch(dictkind) { case NO_DICT: - _Py_SET_OPCODE(*instr, LOAD_ATTR_METHOD_NO_DICT); + _py_set_opcode(instr, LOAD_ATTR_METHOD_NO_DICT); break; case MANAGED_VALUES: - _Py_SET_OPCODE(*instr, LOAD_ATTR_METHOD_WITH_VALUES); + _py_set_opcode(instr, LOAD_ATTR_METHOD_WITH_VALUES); break; case MANAGED_DICT: SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_HAS_MANAGED_DICT); goto fail; case OFFSET_DICT: assert(owner_cls->tp_dictoffset > 0 && owner_cls->tp_dictoffset <= INT16_MAX); - 
_Py_SET_OPCODE(*instr, LOAD_ATTR_METHOD_WITH_DICT); + _py_set_opcode(instr, LOAD_ATTR_METHOD_WITH_DICT); break; case LAZY_DICT: assert(owner_cls->tp_dictoffset > 0 && owner_cls->tp_dictoffset <= INT16_MAX); - _Py_SET_OPCODE(*instr, LOAD_ATTR_METHOD_LAZY_DICT); + _py_set_opcode(instr, LOAD_ATTR_METHOD_LAZY_DICT); break; } /* `descr` is borrowed. This is safe for methods (even inherited ones from @@ -1130,22 +1094,22 @@ PyObject *descr, DescriptorClassification kind) */ write_u32(cache->type_version, owner_cls->tp_version_tag); write_obj(cache->descr, descr); - // Fall through. return 1; fail: return 0; } -int +void _Py_Specialize_LoadGlobal( PyObject *globals, PyObject *builtins, _Py_CODEUNIT *instr, PyObject *name) { assert(_PyOpcode_Caches[LOAD_GLOBAL] == INLINE_CACHE_ENTRIES_LOAD_GLOBAL); /* Use inline cache */ - _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)(instr + 1); + _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)(instr + OPSIZE); assert(PyUnicode_CheckExact(name)); if (!PyDict_CheckExact(globals)) { + SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_LOAD_GLOBAL_NON_DICT); goto fail; } PyDictKeysObject * globals_keys = ((PyDictObject *)globals)->ma_keys; @@ -1155,23 +1119,26 @@ _Py_Specialize_LoadGlobal( } Py_ssize_t index = _PyDictKeys_StringLookup(globals_keys, name); if (index == DKIX_ERROR) { - SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_LOAD_GLOBAL_NON_STRING_OR_SPLIT); + SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_EXPECTED_ERROR); goto fail; } if (index != DKIX_EMPTY) { if (index != (uint16_t)index) { + SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_OUT_OF_RANGE); goto fail; } uint32_t keys_version = _PyDictKeys_GetVersionForCurrentState(globals_keys); if (keys_version == 0) { + SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_OUT_OF_VERSIONS); goto fail; } cache->index = (uint16_t)index; write_u32(cache->module_keys_version, keys_version); - _Py_SET_OPCODE(*instr, LOAD_GLOBAL_MODULE); + _py_set_opcode(instr, LOAD_GLOBAL_MODULE); goto success; } if (!PyDict_CheckExact(builtins)) { + SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_LOAD_GLOBAL_NON_DICT); goto fail; } PyDictKeysObject * builtin_keys = ((PyDictObject *)builtins)->ma_keys; @@ -1181,10 +1148,11 @@ _Py_Specialize_LoadGlobal( } index = _PyDictKeys_StringLookup(builtin_keys, name); if (index == DKIX_ERROR) { - SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_LOAD_GLOBAL_NON_STRING_OR_SPLIT); + SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_EXPECTED_ERROR); goto fail; } if (index != (uint16_t)index) { + SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_OUT_OF_RANGE); goto fail; } uint32_t globals_version = _PyDictKeys_GetVersionForCurrentState(globals_keys); @@ -1204,18 +1172,18 @@ _Py_Specialize_LoadGlobal( cache->index = (uint16_t)index; write_u32(cache->module_keys_version, globals_version); cache->builtin_keys_version = (uint16_t)builtins_version; - _Py_SET_OPCODE(*instr, LOAD_GLOBAL_BUILTIN); + _py_set_opcode(instr, LOAD_GLOBAL_BUILTIN); goto success; fail: STAT_INC(LOAD_GLOBAL, failure); assert(!PyErr_Occurred()); + _py_set_opcode(instr, LOAD_GLOBAL); cache->counter = adaptive_counter_backoff(cache->counter); - return 0; + return; success: STAT_INC(LOAD_GLOBAL, success); assert(!PyErr_Occurred()); - cache->counter = miss_counter_start(); - return 0; + cache->counter = adaptive_counter_cooldown(); } #ifdef Py_STATS @@ -1303,17 +1271,17 @@ function_get_version(PyObject *o, int opcode) return version; } -int +void _Py_Specialize_BinarySubscr( PyObject *container, PyObject *sub, _Py_CODEUNIT *instr) { assert(_PyOpcode_Caches[BINARY_SUBSCR] == 
INLINE_CACHE_ENTRIES_BINARY_SUBSCR); - _PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)(instr + 1); + _PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)(instr + OPSIZE); PyTypeObject *container_type = Py_TYPE(container); if (container_type == &PyList_Type) { if (PyLong_CheckExact(sub)) { - _Py_SET_OPCODE(*instr, BINARY_SUBSCR_LIST_INT); + _py_set_opcode(instr, BINARY_SUBSCR_LIST_INT); goto success; } SPECIALIZATION_FAIL(BINARY_SUBSCR, @@ -1322,7 +1290,7 @@ _Py_Specialize_BinarySubscr( } if (container_type == &PyTuple_Type) { if (PyLong_CheckExact(sub)) { - _Py_SET_OPCODE(*instr, BINARY_SUBSCR_TUPLE_INT); + _py_set_opcode(instr, BINARY_SUBSCR_TUPLE_INT); goto success; } SPECIALIZATION_FAIL(BINARY_SUBSCR, @@ -1330,7 +1298,7 @@ _Py_Specialize_BinarySubscr( goto fail; } if (container_type == &PyDict_Type) { - _Py_SET_OPCODE(*instr, BINARY_SUBSCR_DICT); + _py_set_opcode(instr, BINARY_SUBSCR_DICT); goto success; } PyTypeObject *cls = Py_TYPE(container); @@ -1361,7 +1329,7 @@ _Py_Specialize_BinarySubscr( } cache->func_version = version; ((PyHeapTypeObject *)container_type)->_spec_cache.getitem = descriptor; - _Py_SET_OPCODE(*instr, BINARY_SUBSCR_GETITEM); + _py_set_opcode(instr, BINARY_SUBSCR_GETITEM); goto success; } SPECIALIZATION_FAIL(BINARY_SUBSCR, @@ -1369,26 +1337,26 @@ _Py_Specialize_BinarySubscr( fail: STAT_INC(BINARY_SUBSCR, failure); assert(!PyErr_Occurred()); + _py_set_opcode(instr, BINARY_SUBSCR); cache->counter = adaptive_counter_backoff(cache->counter); - return 0; + return; success: STAT_INC(BINARY_SUBSCR, success); assert(!PyErr_Occurred()); - cache->counter = miss_counter_start(); - return 0; + cache->counter = adaptive_counter_cooldown(); } -int +void _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, _Py_CODEUNIT *instr) { - _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)(instr + 1); + _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)(instr + OPSIZE); PyTypeObject *container_type = Py_TYPE(container); if (container_type == &PyList_Type) { if (PyLong_CheckExact(sub)) { if ((Py_SIZE(sub) == 0 || Py_SIZE(sub) == 1) && ((PyLongObject *)sub)->ob_digit[0] < (size_t)PyList_GET_SIZE(container)) { - _Py_SET_OPCODE(*instr, STORE_SUBSCR_LIST_INT); + _py_set_opcode(instr, STORE_SUBSCR_LIST_INT); goto success; } else { @@ -1406,7 +1374,7 @@ _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, _Py_CODEUNIT *ins } } if (container_type == &PyDict_Type) { - _Py_SET_OPCODE(*instr, STORE_SUBSCR_DICT); + _py_set_opcode(instr, STORE_SUBSCR_DICT); goto success; } #ifdef Py_STATS @@ -1473,20 +1441,19 @@ _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, _Py_CODEUNIT *ins fail: STAT_INC(STORE_SUBSCR, failure); assert(!PyErr_Occurred()); + _py_set_opcode(instr, STORE_SUBSCR); cache->counter = adaptive_counter_backoff(cache->counter); - return 0; + return; success: STAT_INC(STORE_SUBSCR, success); assert(!PyErr_Occurred()); - cache->counter = miss_counter_start(); - return 0; + cache->counter = adaptive_counter_cooldown(); } static int specialize_class_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs, PyObject *kwnames) { - assert(_Py_OPCODE(*instr) == CALL_ADAPTIVE); PyTypeObject *tp = _PyType_CAST(callable); if (tp->tp_new == PyBaseObject_Type.tp_new) { SPECIALIZATION_FAIL(CALL, SPEC_FAIL_CALL_PYTHON_CLASS); @@ -1496,20 +1463,20 @@ specialize_class_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs, int oparg = _Py_OPARG(*instr); if (nargs == 1 && kwnames == NULL && oparg == 1) { if (tp == &PyUnicode_Type) { - _Py_SET_OPCODE(*instr, 
CALL_NO_KW_STR_1); + _py_set_opcode(instr, CALL_NO_KW_STR_1); return 0; } else if (tp == &PyType_Type) { - _Py_SET_OPCODE(*instr, CALL_NO_KW_TYPE_1); + _py_set_opcode(instr, CALL_NO_KW_TYPE_1); return 0; } else if (tp == &PyTuple_Type) { - _Py_SET_OPCODE(*instr, CALL_NO_KW_TUPLE_1); + _py_set_opcode(instr, CALL_NO_KW_TUPLE_1); return 0; } } if (tp->tp_vectorcall != NULL) { - _Py_SET_OPCODE(*instr, CALL_BUILTIN_CLASS); + _py_set_opcode(instr, CALL_BUILTIN_CLASS); return 0; } SPECIALIZATION_FAIL(CALL, tp == &PyUnicode_Type ? @@ -1548,7 +1515,6 @@ static int specialize_method_descriptor(PyMethodDescrObject *descr, _Py_CODEUNIT *instr, int nargs, PyObject *kwnames) { - assert(_Py_OPCODE(*instr) == CALL_ADAPTIVE); if (kwnames) { SPECIALIZATION_FAIL(CALL, SPEC_FAIL_CALL_KWNAMES); return -1; @@ -1562,7 +1528,7 @@ specialize_method_descriptor(PyMethodDescrObject *descr, _Py_CODEUNIT *instr, SPECIALIZATION_FAIL(CALL, SPEC_FAIL_WRONG_NUMBER_ARGUMENTS); return -1; } - _Py_SET_OPCODE(*instr, CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS); + _py_set_opcode(instr, CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS); return 0; } case METH_O: { @@ -1576,18 +1542,18 @@ specialize_method_descriptor(PyMethodDescrObject *descr, _Py_CODEUNIT *instr, bool pop = (_Py_OPCODE(next) == POP_TOP); int oparg = _Py_OPARG(*instr); if ((PyObject *)descr == list_append && oparg == 1 && pop) { - _Py_SET_OPCODE(*instr, CALL_NO_KW_LIST_APPEND); + _py_set_opcode(instr, CALL_NO_KW_LIST_APPEND); return 0; } - _Py_SET_OPCODE(*instr, CALL_NO_KW_METHOD_DESCRIPTOR_O); + _py_set_opcode(instr, CALL_NO_KW_METHOD_DESCRIPTOR_O); return 0; } case METH_FASTCALL: { - _Py_SET_OPCODE(*instr, CALL_NO_KW_METHOD_DESCRIPTOR_FAST); + _py_set_opcode(instr, CALL_NO_KW_METHOD_DESCRIPTOR_FAST); return 0; } case METH_FASTCALL|METH_KEYWORDS: { - _Py_SET_OPCODE(*instr, CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS); + _py_set_opcode(instr, CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS); return 0; } } @@ -1599,8 +1565,7 @@ static int specialize_py_call(PyFunctionObject *func, _Py_CODEUNIT *instr, int nargs, PyObject *kwnames, bool bound_method) { - _PyCallCache *cache = (_PyCallCache *)(instr + 1); - assert(_Py_OPCODE(*instr) == CALL_ADAPTIVE); + _PyCallCache *cache = (_PyCallCache *)(instr + OPSIZE); PyCodeObject *code = (PyCodeObject *)func->func_code; int kind = function_kind(code); /* Don't specialize if PEP 523 is active */ @@ -1639,14 +1604,14 @@ specialize_py_call(PyFunctionObject *func, _Py_CODEUNIT *instr, int nargs, write_u32(cache->func_version, version); cache->min_args = min_args; if (argcount == nargs) { - _Py_SET_OPCODE(*instr, bound_method ? CALL_BOUND_METHOD_EXACT_ARGS : CALL_PY_EXACT_ARGS); + _py_set_opcode(instr, bound_method ? 
CALL_BOUND_METHOD_EXACT_ARGS : CALL_PY_EXACT_ARGS); } else if (bound_method) { SPECIALIZATION_FAIL(CALL, SPEC_FAIL_CALL_BOUND_METHOD); return -1; } else { - _Py_SET_OPCODE(*instr, CALL_PY_WITH_DEFAULTS); + _py_set_opcode(instr, CALL_PY_WITH_DEFAULTS); } return 0; } @@ -1655,7 +1620,6 @@ static int specialize_c_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs, PyObject *kwnames) { - assert(_Py_OPCODE(*instr) == CALL_ADAPTIVE); if (PyCFunction_GET_FUNCTION(callable) == NULL) { return 1; } @@ -1674,10 +1638,10 @@ specialize_c_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs, /* len(o) */ PyInterpreterState *interp = _PyInterpreterState_GET(); if (callable == interp->callable_cache.len) { - _Py_SET_OPCODE(*instr, CALL_NO_KW_LEN); + _py_set_opcode(instr, CALL_NO_KW_LEN); return 0; } - _Py_SET_OPCODE(*instr, CALL_NO_KW_BUILTIN_O); + _py_set_opcode(instr, CALL_NO_KW_BUILTIN_O); return 0; } case METH_FASTCALL: { @@ -1689,15 +1653,15 @@ specialize_c_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs, /* isinstance(o1, o2) */ PyInterpreterState *interp = _PyInterpreterState_GET(); if (callable == interp->callable_cache.isinstance) { - _Py_SET_OPCODE(*instr, CALL_NO_KW_ISINSTANCE); + _py_set_opcode(instr, CALL_NO_KW_ISINSTANCE); return 0; } } - _Py_SET_OPCODE(*instr, CALL_NO_KW_BUILTIN_FAST); + _py_set_opcode(instr, CALL_NO_KW_BUILTIN_FAST); return 0; } case METH_FASTCALL | METH_KEYWORDS: { - _Py_SET_OPCODE(*instr, CALL_BUILTIN_FAST_WITH_KEYWORDS); + _py_set_opcode(instr, CALL_BUILTIN_FAST_WITH_KEYWORDS); return 0; } default: @@ -1752,12 +1716,12 @@ call_fail_kind(PyObject *callable) /* TODO: - Specialize calling classes. */ -int +void _Py_Specialize_Call(PyObject *callable, _Py_CODEUNIT *instr, int nargs, PyObject *kwnames) { assert(_PyOpcode_Caches[CALL] == INLINE_CACHE_ENTRIES_CALL); - _PyCallCache *cache = (_PyCallCache *)(instr + 1); + _PyCallCache *cache = (_PyCallCache *)(instr + OPSIZE); int fail; if (PyCFunction_CheckExact(callable)) { fail = specialize_c_call(callable, instr, nargs, kwnames); @@ -1790,14 +1754,14 @@ _Py_Specialize_Call(PyObject *callable, _Py_CODEUNIT *instr, int nargs, if (fail) { STAT_INC(CALL, failure); assert(!PyErr_Occurred()); + _py_set_opcode(instr, CALL); cache->counter = adaptive_counter_backoff(cache->counter); } else { STAT_INC(CALL, success); assert(!PyErr_Occurred()); - cache->counter = miss_counter_start(); + cache->counter = adaptive_counter_cooldown(); } - return 0; } #ifdef Py_STATS @@ -1875,7 +1839,7 @@ _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr, int oparg, PyObject **locals) { assert(_PyOpcode_Caches[BINARY_OP] == INLINE_CACHE_ENTRIES_BINARY_OP); - _PyBinaryOpCache *cache = (_PyBinaryOpCache *)(instr + 1); + _PyBinaryOpCache *cache = (_PyBinaryOpCache *)(instr + OPSIZE); switch (oparg) { case NB_ADD: case NB_INPLACE_ADD: @@ -1887,18 +1851,18 @@ _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr, bool to_store = (_Py_OPCODE(next) == STORE_FAST || _Py_OPCODE(next) == STORE_FAST__LOAD_FAST); if (to_store && locals[_Py_OPARG(next)] == lhs) { - _Py_SET_OPCODE(*instr, BINARY_OP_INPLACE_ADD_UNICODE); + _py_set_opcode(instr, BINARY_OP_INPLACE_ADD_UNICODE); goto success; } - _Py_SET_OPCODE(*instr, BINARY_OP_ADD_UNICODE); + _py_set_opcode(instr, BINARY_OP_ADD_UNICODE); goto success; } if (PyLong_CheckExact(lhs)) { - _Py_SET_OPCODE(*instr, BINARY_OP_ADD_INT); + _py_set_opcode(instr, BINARY_OP_ADD_INT); goto success; } if (PyFloat_CheckExact(lhs)) { - _Py_SET_OPCODE(*instr, BINARY_OP_ADD_FLOAT); 
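Throughout these specialize.c hunks the fixed miss_counter_start() value of 53 is replaced by adaptive_counter_warmup(), adaptive_counter_cooldown() and adaptive_counter_backoff(), which live in the internal headers rather than in this file. The sketch below is only meant to convey the idea — a counter that lets a new instruction specialize quickly, rests for a while after a successful specialization, and waits exponentially longer after repeated failures — and does not claim to match the real encoding in pycore_code.h:

    #include <stdint.h>

    /* Conceptual sketch only -- not CPython's actual counter encoding.
     * The low bits of a 16-bit counter remember the back-off exponent. */
    #define BACKOFF_BITS 4
    #define MAX_BACKOFF  (16 - BACKOFF_BITS)  /* keep value << BACKOFF_BITS in 16 bits */

    static inline uint16_t
    counter_bits(unsigned int value, unsigned int backoff)
    {
        return (uint16_t)((value << BACKOFF_BITS) | backoff);
    }

    static inline uint16_t
    counter_warmup_sketch(void)       /* new code object: try to specialize soon */
    {
        return counter_bits(1, 1);
    }

    static inline uint16_t
    counter_cooldown_sketch(void)     /* success: wait a while before re-checking */
    {
        return counter_bits(52, 0);
    }

    static inline uint16_t
    counter_backoff_sketch(uint16_t counter)  /* failure: wait twice as long as before */
    {
        unsigned int backoff = counter & ((1 << BACKOFF_BITS) - 1);
        if (backoff < MAX_BACKOFF) {
            backoff++;
        }
        return counter_bits((1u << backoff) - 1, backoff);
    }

On failure the specializing functions above now also rewrite the instruction back to its unadapted opcode (_py_set_opcode(instr, LOAD_ATTR) and friends) before storing the backed-off counter.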
+ _py_set_opcode(instr, BINARY_OP_ADD_FLOAT); goto success; } break; @@ -1908,11 +1872,11 @@ _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr, break; } if (PyLong_CheckExact(lhs)) { - _Py_SET_OPCODE(*instr, BINARY_OP_MULTIPLY_INT); + _py_set_opcode(instr, BINARY_OP_MULTIPLY_INT); goto success; } if (PyFloat_CheckExact(lhs)) { - _Py_SET_OPCODE(*instr, BINARY_OP_MULTIPLY_FLOAT); + _py_set_opcode(instr, BINARY_OP_MULTIPLY_FLOAT); goto success; } break; @@ -1922,32 +1886,23 @@ _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr, break; } if (PyLong_CheckExact(lhs)) { - _Py_SET_OPCODE(*instr, BINARY_OP_SUBTRACT_INT); + _py_set_opcode(instr, BINARY_OP_SUBTRACT_INT); goto success; } if (PyFloat_CheckExact(lhs)) { - _Py_SET_OPCODE(*instr, BINARY_OP_SUBTRACT_FLOAT); + _py_set_opcode(instr, BINARY_OP_SUBTRACT_FLOAT); goto success; } break; -#ifndef Py_STATS - default: - // These operators don't have any available specializations. Rather - // than repeatedly attempting to specialize them, just convert them - // back to BINARY_OP (unless we're collecting stats, where it's more - // important to get accurate hit counts for the unadaptive version - // and each of the different failure types): - _Py_SET_OPCODE(*instr, BINARY_OP); - return; -#endif } SPECIALIZATION_FAIL(BINARY_OP, binary_op_fail_kind(oparg, lhs, rhs)); STAT_INC(BINARY_OP, failure); + _py_set_opcode(instr, BINARY_OP); cache->counter = adaptive_counter_backoff(cache->counter); return; success: STAT_INC(BINARY_OP, success); - cache->counter = miss_counter_start(); + cache->counter = adaptive_counter_cooldown(); } @@ -2004,25 +1959,15 @@ _Py_Specialize_CompareOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr, int oparg) { assert(_PyOpcode_Caches[COMPARE_OP] == INLINE_CACHE_ENTRIES_COMPARE_OP); - _PyCompareOpCache *cache = (_PyCompareOpCache *)(instr + 1); + _PyCompareOpCache *cache = (_PyCompareOpCache *)(instr + OPSIZE); int next_opcode = _Py_OPCODE(instr[INLINE_CACHE_ENTRIES_COMPARE_OP + 1]); - if (next_opcode != POP_JUMP_IF_FALSE && - next_opcode != POP_JUMP_IF_TRUE) { - // Can't ever combine, so don't don't bother being adaptive (unless - // we're collecting stats, where it's more important to get accurate hit - // counts for the unadaptive version and each of the different failure - // types): -#ifndef Py_STATS - _Py_SET_OPCODE(*instr, COMPARE_OP); - return; -#else + if (next_opcode != POP_JUMP_IF_FALSE && next_opcode != POP_JUMP_IF_TRUE) { if (next_opcode == EXTENDED_ARG) { SPECIALIZATION_FAIL(COMPARE_OP, SPEC_FAIL_COMPARE_OP_EXTENDED_ARG); goto failure; } SPECIALIZATION_FAIL(COMPARE_OP, SPEC_FAIL_COMPARE_OP_NOT_FOLLOWED_BY_COND_JUMP); goto failure; -#endif } assert(oparg <= Py_GE); int when_to_jump_mask = compare_masks[oparg]; @@ -2034,13 +1979,13 @@ _Py_Specialize_CompareOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr, goto failure; } if (PyFloat_CheckExact(lhs)) { - _Py_SET_OPCODE(*instr, COMPARE_OP_FLOAT_JUMP); + _py_set_opcode(instr, COMPARE_OP_FLOAT_JUMP); cache->mask = when_to_jump_mask; goto success; } if (PyLong_CheckExact(lhs)) { if (Py_ABS(Py_SIZE(lhs)) <= 1 && Py_ABS(Py_SIZE(rhs)) <= 1) { - _Py_SET_OPCODE(*instr, COMPARE_OP_INT_JUMP); + _py_set_opcode(instr, COMPARE_OP_INT_JUMP); cache->mask = when_to_jump_mask; goto success; } @@ -2055,7 +2000,7 @@ _Py_Specialize_CompareOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr, goto failure; } else { - _Py_SET_OPCODE(*instr, COMPARE_OP_STR_JUMP); + _py_set_opcode(instr, COMPARE_OP_STR_JUMP); cache->mask = (when_to_jump_mask 
& 2) == 0; goto success; } @@ -2063,11 +2008,12 @@ _Py_Specialize_CompareOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr, SPECIALIZATION_FAIL(COMPARE_OP, compare_op_fail_kind(lhs, rhs)); failure: STAT_INC(COMPARE_OP, failure); + _py_set_opcode(instr, COMPARE_OP); cache->counter = adaptive_counter_backoff(cache->counter); return; success: STAT_INC(COMPARE_OP, success); - cache->counter = miss_counter_start(); + cache->counter = adaptive_counter_cooldown(); } #ifdef Py_STATS @@ -2089,17 +2035,17 @@ _Py_Specialize_UnpackSequence(PyObject *seq, _Py_CODEUNIT *instr, int oparg) { assert(_PyOpcode_Caches[UNPACK_SEQUENCE] == INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); - _PyUnpackSequenceCache *cache = (_PyUnpackSequenceCache *)(instr + 1); + _PyUnpackSequenceCache *cache = (_PyUnpackSequenceCache *)(instr + OPSIZE); if (PyTuple_CheckExact(seq)) { if (PyTuple_GET_SIZE(seq) != oparg) { SPECIALIZATION_FAIL(UNPACK_SEQUENCE, SPEC_FAIL_EXPECTED_ERROR); goto failure; } if (PyTuple_GET_SIZE(seq) == 2) { - _Py_SET_OPCODE(*instr, UNPACK_SEQUENCE_TWO_TUPLE); + _py_set_opcode(instr, UNPACK_SEQUENCE_TWO_TUPLE); goto success; } - _Py_SET_OPCODE(*instr, UNPACK_SEQUENCE_TUPLE); + _py_set_opcode(instr, UNPACK_SEQUENCE_TUPLE); goto success; } if (PyList_CheckExact(seq)) { @@ -2107,17 +2053,18 @@ _Py_Specialize_UnpackSequence(PyObject *seq, _Py_CODEUNIT *instr, int oparg) SPECIALIZATION_FAIL(UNPACK_SEQUENCE, SPEC_FAIL_EXPECTED_ERROR); goto failure; } - _Py_SET_OPCODE(*instr, UNPACK_SEQUENCE_LIST); + _py_set_opcode(instr, UNPACK_SEQUENCE_LIST); goto success; } SPECIALIZATION_FAIL(UNPACK_SEQUENCE, unpack_sequence_fail_kind(seq)); failure: STAT_INC(UNPACK_SEQUENCE, failure); + _py_set_opcode(instr, UNPACK_SEQUENCE); cache->counter = adaptive_counter_backoff(cache->counter); return; success: STAT_INC(UNPACK_SEQUENCE, success); - cache->counter = miss_counter_start(); + cache->counter = adaptive_counter_cooldown(); } #ifdef Py_STATS @@ -2193,27 +2140,37 @@ int #endif void -_Py_Specialize_ForIter(PyObject *iter, _Py_CODEUNIT *instr) +_Py_Specialize_ForIter(PyObject *iter, _Py_CODEUNIT *instr, int oparg) { assert(_PyOpcode_Caches[FOR_ITER] == INLINE_CACHE_ENTRIES_FOR_ITER); - _PyForIterCache *cache = (_PyForIterCache *)(instr + 1); + _PyForIterCache *cache = (_PyForIterCache *)(instr + OPSIZE); PyTypeObject *tp = Py_TYPE(iter); - _Py_CODEUNIT next = instr[1+INLINE_CACHE_ENTRIES_FOR_ITER]; + _Py_CODEUNIT next = instr[OPSIZE + INLINE_CACHE_ENTRIES_FOR_ITER]; int next_op = _PyOpcode_Deopt[_Py_OPCODE(next)]; if (tp == &PyListIter_Type) { - _Py_SET_OPCODE(*instr, FOR_ITER_LIST); + _py_set_opcode(instr, FOR_ITER_LIST); + goto success; + } + else if (tp == &PyTupleIter_Type) { + _py_set_opcode(instr, FOR_ITER_TUPLE); goto success; } else if (tp == &PyRangeIter_Type && next_op == STORE_FAST) { - _Py_SET_OPCODE(*instr, FOR_ITER_RANGE); + _py_set_opcode(instr, FOR_ITER_RANGE); + goto success; + } + else if (tp == &PyGen_Type && oparg <= SHRT_MAX) { + assert(_Py_OPCODE(instr[oparg + OPSIZE + INLINE_CACHE_ENTRIES_FOR_ITER]) == END_FOR); + _py_set_opcode(instr, FOR_ITER_GEN); goto success; } SPECIALIZATION_FAIL(FOR_ITER, _PySpecialization_ClassifyIterator(iter)); STAT_INC(FOR_ITER, failure); + _py_set_opcode(instr, FOR_ITER); cache->counter = adaptive_counter_backoff(cache->counter); return; success: STAT_INC(FOR_ITER, success); - cache->counter = miss_counter_start(); + cache->counter = adaptive_counter_cooldown(); } diff --git a/Python/stdlib_module_names.h b/Python/stdlib_module_names.h index c31bf1ed09f66a..4e7dfb14d19dec 100644 
--- a/Python/stdlib_module_names.h +++ b/Python/stdlib_module_names.h @@ -96,9 +96,7 @@ static const char* _Py_stdlib_module_names[] = { "argparse", "array", "ast", -"asynchat", "asyncio", -"asyncore", "atexit", "audioop", "base64", @@ -136,7 +134,6 @@ static const char* _Py_stdlib_module_names[] = { "decimal", "difflib", "dis", -"distutils", "doctest", "email", "encodings", diff --git a/Python/structmember.c b/Python/structmember.c index c7e318811d82b8..1b8be28dcf2eb2 100644 --- a/Python/structmember.c +++ b/Python/structmember.c @@ -49,8 +49,7 @@ PyMember_GetOne(const char *obj_addr, PyMemberDef *l) break; case T_STRING: if (*(char**)addr == NULL) { - Py_INCREF(Py_None); - v = Py_None; + v = Py_NewRef(Py_None); } else v = PyUnicode_FromString(*(char**)addr); @@ -75,7 +74,7 @@ PyMember_GetOne(const char *obj_addr, PyMemberDef *l) PyErr_Format(PyExc_AttributeError, "'%.200s' object has no attribute '%s'", tp->tp_name, l->name); - } + } Py_XINCREF(v); break; case T_LONGLONG: @@ -85,8 +84,7 @@ PyMember_GetOne(const char *obj_addr, PyMemberDef *l) v = PyLong_FromUnsignedLongLong(*(unsigned long long *)addr); break; case T_NONE: - v = Py_None; - Py_INCREF(v); + v = Py_NewRef(Py_None); break; default: PyErr_SetString(PyExc_SystemError, "bad memberdescr type"); @@ -247,9 +245,8 @@ PyMember_SetOne(char *addr, PyMemberDef *l, PyObject *v) break; case T_OBJECT: case T_OBJECT_EX: - Py_XINCREF(v); oldv = *(PyObject **)addr; - *(PyObject **)addr = v; + *(PyObject **)addr = Py_XNewRef(v); Py_XDECREF(oldv); break; case T_CHAR: { diff --git a/Python/suggestions.c b/Python/suggestions.c index 82376b6cd985a3..f2c018ef2c4533 100644 --- a/Python/suggestions.c +++ b/Python/suggestions.c @@ -1,5 +1,7 @@ #include "Python.h" #include "pycore_frame.h" +#include "pycore_runtime.h" // _PyRuntime +#include "pycore_global_objects.h" // _Py_ID() #include "pycore_pyerrors.h" #include "pycore_code.h" // _PyCode_GetVarnames() @@ -39,10 +41,8 @@ substitution_cost(char a, char b) static Py_ssize_t levenshtein_distance(const char *a, size_t a_size, const char *b, size_t b_size, - size_t max_cost) + size_t max_cost, size_t *buffer) { - static size_t buffer[MAX_STRING_SIZE]; - // Both strings are the same (by identity) if (a == b) { return 0; @@ -145,12 +145,16 @@ calculate_suggestions(PyObject *dir, if (name_str == NULL) { return NULL; } - + size_t *buffer = PyMem_New(size_t, MAX_STRING_SIZE); + if (buffer == NULL) { + return PyErr_NoMemory(); + } for (int i = 0; i < dir_size; ++i) { PyObject *item = PyList_GET_ITEM(dir, i); Py_ssize_t item_size; const char *item_str = PyUnicode_AsUTF8AndSize(item, &item_size); if (item_str == NULL) { + PyMem_Free(buffer); return NULL; } if (PyUnicode_CompareWithASCIIString(name, item_str) == 0) { @@ -161,8 +165,8 @@ calculate_suggestions(PyObject *dir, // Don't take matches we've already beaten. 
max_distance = Py_MIN(max_distance, suggestion_distance - 1); Py_ssize_t current_distance = - levenshtein_distance(name_str, name_size, - item_str, item_size, max_distance); + levenshtein_distance(name_str, name_size, item_str, + item_size, max_distance, buffer); if (current_distance > max_distance) { continue; } @@ -171,8 +175,8 @@ calculate_suggestions(PyObject *dir, suggestion_distance = current_distance; } } - Py_XINCREF(suggestion); - return suggestion; + PyMem_Free(buffer); + return Py_XNewRef(suggestion); } static PyObject * @@ -226,6 +230,24 @@ get_suggestions_for_name_error(PyObject* name, PyFrameObject* frame) return NULL; } + // Are we inside a method and the instance has an attribute called 'name'? + if (PySequence_Contains(dir, &_Py_ID(self)) > 0) { + PyObject* locals = PyFrame_GetLocals(frame); + if (!locals) { + goto error; + } + PyObject* self = PyDict_GetItem(locals, &_Py_ID(self)); /* borrowed */ + Py_DECREF(locals); + if (!self) { + goto error; + } + + if (PyObject_HasAttr(self, name)) { + Py_DECREF(dir); + return PyUnicode_FromFormat("self.%S", name); + } + } + PyObject *suggestions = calculate_suggestions(dir, name); Py_DECREF(dir); if (suggestions != NULL) { @@ -250,6 +272,10 @@ get_suggestions_for_name_error(PyObject* name, PyFrameObject* frame) Py_DECREF(dir); return suggestions; + +error: + Py_DECREF(dir); + return NULL; } static bool @@ -378,6 +404,14 @@ _Py_UTF8_Edit_Cost(PyObject *a, PyObject *b, Py_ssize_t max_cost) if (max_cost == -1) { max_cost = MOVE_COST * Py_MAX(size_a, size_b); } - return levenshtein_distance(utf8_a, size_a, utf8_b, size_b, max_cost); + size_t *buffer = PyMem_New(size_t, MAX_STRING_SIZE); + if (buffer == NULL) { + PyErr_NoMemory(); + return -1; + } + Py_ssize_t res = levenshtein_distance(utf8_a, size_a, + utf8_b, size_b, max_cost, buffer); + PyMem_Free(buffer); + return res; } diff --git a/Python/symtable.c b/Python/symtable.c index 342f5a080d3ddc..fb2bb7d83835de 100644 --- a/Python/symtable.c +++ b/Python/symtable.c @@ -74,8 +74,7 @@ ste_new(struct symtable *st, identifier name, _Py_block_ty block, ste->ste_table = st; ste->ste_id = k; /* ste owns reference to k */ - Py_INCREF(name); - ste->ste_name = name; + ste->ste_name = Py_NewRef(name); ste->ste_symbols = NULL; ste->ste_varnames = NULL; @@ -286,8 +285,7 @@ _PySymtable_Build(mod_ty mod, PyObject *filename, PyFutureFeatures *future) _PySymtable_Free(st); return NULL; } - Py_INCREF(filename); - st->st_filename = filename; + st->st_filename = Py_NewRef(filename); st->st_future = future; /* Setup recursion depth check counters */ @@ -375,17 +373,17 @@ PySymtable_Lookup(struct symtable *st, void *key) if (k == NULL) return NULL; v = PyDict_GetItemWithError(st->st_blocks, k); + Py_DECREF(k); + if (v) { assert(PySTEntry_Check(v)); - Py_INCREF(v); } else if (!PyErr_Occurred()) { PyErr_SetString(PyExc_KeyError, "unknown symbol table entry"); } - Py_DECREF(k); - return (PySTEntryObject *)v; + return (PySTEntryObject *)Py_XNewRef(v); } long @@ -1949,8 +1947,7 @@ symtable_visit_alias(struct symtable *st, alias_ty a) return 0; } else { - store_name = name; - Py_INCREF(store_name); + store_name = Py_NewRef(name); } if (!_PyUnicode_EqualToASCIIString(name, "*")) { int r = symtable_add_def(st, store_name, DEF_IMPORT, LOCATION(a)); @@ -2144,14 +2141,13 @@ _Py_SymtableStringObjectFlags(const char *str, PyObject *filename, _PyArena_Free(arena); return NULL; } - PyFutureFeatures *future = _PyFuture_FromAST(mod, filename); - if (future == NULL) { + PyFutureFeatures future; + if (!_PyFuture_FromAST(mod, 
filename, &future)) { _PyArena_Free(arena); return NULL; } - future->ff_features |= flags->cf_flags; - st = _PySymtable_Build(mod, filename, future); - PyObject_Free((void *)future); + future.ff_features |= flags->cf_flags; + st = _PySymtable_Build(mod, filename, &future); _PyArena_Free(arena); return st; } diff --git a/Python/sysmodule.c b/Python/sysmodule.c index f73d332796c986..91f5c487c98fe3 100644 --- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -17,7 +17,6 @@ Data members: #include "Python.h" #include "pycore_call.h" // _PyObject_CallNoArgs() #include "pycore_ceval.h" // _PyEval_SetAsyncGenFinalizer() -#include "pycore_code.h" // _Py_QuickenedCount #include "pycore_frame.h" // _PyInterpreterFrame #include "pycore_initconfig.h" // _PyStatus_EXCEPTION() #include "pycore_long.h" // _PY_LONG_MAX_STR_DIGITS_THRESHOLD @@ -199,8 +198,7 @@ sys_audit_tstate(PyThreadState *ts, const char *event, eventArgs = _Py_VaBuildValue_SizeT(argFormat, vargs); if (eventArgs && !PyTuple_Check(eventArgs)) { PyObject *argTuple = PyTuple_Pack(1, eventArgs); - Py_DECREF(eventArgs); - eventArgs = argTuple; + Py_SETREF(eventArgs, argTuple); } } else { @@ -432,6 +430,8 @@ sys_addaudithook_impl(PyObject *module, PyObject *hook) if (interp->audit_hooks == NULL) { return NULL; } + /* Avoid having our list of hooks show up in the GC module */ + PyObject_GC_UnTrack(interp->audit_hooks); } if (PyList_Append(interp->audit_hooks, hook) < 0) { @@ -839,8 +839,7 @@ sys_getdefaultencoding_impl(PyObject *module) { _Py_DECLARE_STR(utf_8, "utf-8"); PyObject *ret = &_Py_STR(utf_8); - Py_INCREF(ret); - return ret; + return Py_NewRef(ret); } /*[clinic input] @@ -951,10 +950,6 @@ static int profile_trampoline(PyObject *self, PyFrameObject *frame, int what, PyObject *arg) { - if (arg == NULL) { - arg = Py_None; - } - PyThreadState *tstate = _PyThreadState_GET(); PyObject *result = call_trampoline(tstate, self, frame, what, arg); if (result == NULL) { @@ -1069,8 +1064,7 @@ sys_gettrace_impl(PyObject *module) if (temp == NULL) temp = Py_None; - Py_INCREF(temp); - return temp; + return Py_NewRef(temp); } static PyObject * @@ -1143,8 +1137,7 @@ sys_getprofile_impl(PyObject *module) if (temp == NULL) temp = Py_None; - Py_INCREF(temp); - return temp; + return Py_NewRef(temp); } @@ -1369,11 +1362,8 @@ sys_get_asyncgen_hooks_impl(PyObject *module) finalizer = Py_None; } - Py_INCREF(firstiter); - PyStructSequence_SET_ITEM(res, 0, firstiter); - - Py_INCREF(finalizer); - PyStructSequence_SET_ITEM(res, 1, finalizer); + PyStructSequence_SET_ITEM(res, 0, Py_NewRef(firstiter)); + PyStructSequence_SET_ITEM(res, 1, Py_NewRef(finalizer)); return res; } @@ -1806,8 +1796,7 @@ sys_getsizeof(PyObject *self, PyObject *args, PyObject *kwds) /* Has a default value been given */ if (dflt != NULL && _PyErr_ExceptionMatches(tstate, PyExc_TypeError)) { _PyErr_Clear(tstate); - Py_INCREF(dflt); - return dflt; + return Py_NewRef(dflt); } else return NULL; @@ -1855,17 +1844,6 @@ sys_gettotalrefcount_impl(PyObject *module) #endif /* Py_REF_DEBUG */ -/*[clinic input] -sys._getquickenedcount -> Py_ssize_t -[clinic start generated code]*/ - -static Py_ssize_t -sys__getquickenedcount_impl(PyObject *module) -/*[clinic end generated code: output=1ab259e7f91248a2 input=249d448159eca912]*/ -{ - return _Py_QuickenedCount; -} - /*[clinic input] sys.getallocatedblocks -> Py_ssize_t @@ -2216,7 +2194,6 @@ static PyMethodDef sys_methods[] = { SYS_GETALLOCATEDBLOCKS_METHODDEF SYS_GETFILESYSTEMENCODING_METHODDEF SYS_GETFILESYSTEMENCODEERRORS_METHODDEF - 
SYS__GETQUICKENEDCOUNT_METHODDEF #ifdef Py_TRACE_REFS {"getobjects", _Py_GetObjects, METH_VARARGS}, #endif @@ -2272,8 +2249,9 @@ list_builtin_module_names(void) if (list == NULL) { return NULL; } - for (Py_ssize_t i = 0; PyImport_Inittab[i].name != NULL; i++) { - PyObject *name = PyUnicode_FromString(PyImport_Inittab[i].name); + struct _inittab *inittab = _PyRuntime.imports.inittab; + for (Py_ssize_t i = 0; inittab[i].name != NULL; i++) { + PyObject *name = PyUnicode_FromString(inittab[i].name); if (name == NULL) { goto error; } @@ -2585,8 +2563,7 @@ _PySys_AddXOptionWithError(const wchar_t *s) const wchar_t *name_end = wcschr(s, L'='); if (!name_end) { name = PyUnicode_FromWideChar(s, -1); - value = Py_True; - Py_INCREF(value); + value = Py_NewRef(Py_True); } else { name = PyUnicode_FromWideChar(s, name_end - s); @@ -3041,8 +3018,7 @@ make_emscripten_info(void) } PyStructSequence_SET_ITEM(emscripten_info, pos++, oua); } else { - Py_INCREF(Py_None); - PyStructSequence_SET_ITEM(emscripten_info, pos++, Py_None); + PyStructSequence_SET_ITEM(emscripten_info, pos++, Py_NewRef(Py_None)); } #define SetBoolItem(flag) \ @@ -3239,8 +3215,7 @@ sys_add_xoption(PyObject *opts, const wchar_t *s) const wchar_t *name_end = wcschr(s, L'='); if (!name_end) { name = PyUnicode_FromWideChar(s, -1); - value = Py_True; - Py_INCREF(value); + value = Py_NewRef(Py_True); } else { name = PyUnicode_FromWideChar(s, name_end - s); @@ -3417,8 +3392,7 @@ _PySys_Create(PyThreadState *tstate, PyObject **sysmod_p) if (sysdict == NULL) { goto error; } - Py_INCREF(sysdict); - interp->sysdict = sysdict; + interp->sysdict = Py_NewRef(sysdict); if (PyDict_SetItemString(sysdict, "modules", interp->modules) < 0) { goto error; diff --git a/Python/thread.c b/Python/thread.c index 8c8a4e81895eb6..4581f1af043a37 100644 --- a/Python/thread.c +++ b/Python/thread.c @@ -8,15 +8,7 @@ #include "Python.h" #include "pycore_pystate.h" // _PyInterpreterState_GET() #include "pycore_structseq.h" // _PyStructSequence_FiniType() - -#ifndef _POSIX_THREADS -/* This means pthreads are not implemented in libc headers, hence the macro - not present in unistd.h. But they still can be implemented as an external - library (e.g. gnu pth in pthread emulation) */ -# ifdef HAVE_PTHREAD_H -# include <pthread.h> /* _POSIX_THREADS */ -# endif -#endif +#include "pycore_pythread.h" #ifndef DONT_HAVE_STDIO_H #include <stdio.h> @@ -24,33 +16,17 @@ #include <stdlib.h> -#ifndef _POSIX_THREADS - -/* Check if we're running on HP-UX and _SC_THREADS is defined. If so, then - enough of the Posix threads package is implemented to support python - threads. - - This is valid for HP-UX 11.23 running on an ia64 system. If needed, add - a check of __ia64 to verify that we're running on an ia64 system instead - of a pa-risc system. 
-*/ -#ifdef __hpux -#ifdef _SC_THREADS -#define _POSIX_THREADS -#endif -#endif - -#endif /* _POSIX_THREADS */ - -static int initialized; static void PyThread__init_thread(void); /* Forward */ +#define initialized _PyRuntime.threads.initialized + void PyThread_init_thread(void) { - if (initialized) + if (initialized) { return; + } initialized = 1; PyThread__init_thread(); } @@ -58,7 +34,7 @@ PyThread_init_thread(void) #if defined(HAVE_PTHREAD_STUBS) # define PYTHREAD_NAME "pthread-stubs" # include "thread_pthread_stubs.h" -#elif defined(_POSIX_THREADS) +#elif defined(_USE_PTHREADS) /* AKA _PTHREADS */ # if defined(__EMSCRIPTEN__) && !defined(__EMSCRIPTEN_PTHREADS__) # define PYTHREAD_NAME "pthread-stubs" # else @@ -207,8 +183,7 @@ PyThread_GetInfo(void) if (value == NULL) #endif { - Py_INCREF(Py_None); - value = Py_None; + value = Py_NewRef(Py_None); } PyStructSequence_SET_ITEM(threadinfo, pos++, value); return threadinfo; diff --git a/Python/thread_nt.h b/Python/thread_nt.h index d1f1323948a6c6..26f441bd6d3c56 100644 --- a/Python/thread_nt.h +++ b/Python/thread_nt.h @@ -152,11 +152,12 @@ unsigned long PyThread_get_thread_native_id(void); #endif /* - * Initialization of the C package, should not be needed. + * Initialization for the current runtime. */ static void PyThread__init_thread(void) { + // Initialization of the C package should not be needed. } /* diff --git a/Python/thread_pthread.h b/Python/thread_pthread.h index 1c5b320813af83..76d6f3bcdf9c40 100644 --- a/Python/thread_pthread.h +++ b/Python/thread_pthread.h @@ -119,24 +119,21 @@ * pthread_cond support */ -#if defined(HAVE_PTHREAD_CONDATTR_SETCLOCK) && defined(HAVE_CLOCK_GETTIME) && defined(CLOCK_MONOTONIC) -// monotonic is supported statically. It doesn't mean it works on runtime. -#define CONDATTR_MONOTONIC -#endif - -// NULL when pthread_condattr_setclock(CLOCK_MONOTONIC) is not supported. -static pthread_condattr_t *condattr_monotonic = NULL; +#define condattr_monotonic _PyRuntime.threads._condattr_monotonic.ptr static void init_condattr(void) { #ifdef CONDATTR_MONOTONIC - static pthread_condattr_t ca; +# define ca _PyRuntime.threads._condattr_monotonic.val + // XXX We need to check the return code? pthread_condattr_init(&ca); + // XXX We need to run pthread_condattr_destroy() during runtime fini. if (pthread_condattr_setclock(&ca, CLOCK_MONOTONIC) == 0) { condattr_monotonic = &ca; // Use monotonic clock } -#endif +# undef ca +#endif // CONDATTR_MONOTONIC } int @@ -192,15 +189,21 @@ typedef struct { "%s: %s\n", name, strerror(status)); error = 1; } /* - * Initialization. + * Initialization for the current runtime. */ static void PyThread__init_thread(void) { + // The library is only initialized once in the process, + // regardless of how many times the Python runtime is initialized. 
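The thread.c, thread_nt.h and thread_pthread*.h hunks follow the same pattern as the earlier unhandled_keyboard_interrupt and py_tls_entries changes: a file-level C static is folded into the global _PyRuntime state, and the old identifier is kept as a macro alias so the surrounding code compiles unchanged. Schematically — the names below carry a _sketch suffix because the real struct layout lives in the internal pycore headers and is not shown in this diff:

    /* Before: a per-file static, invisible to runtime init/fini.
     *     static int initialized;
     */

    /* After: the state has one well-known home in the runtime... */
    struct _pythread_state_sketch {
        int initialized;
        /* ...other formerly-static thread state... */
    };

    struct _py_runtime_sketch {
        struct _pythread_state_sketch threads;
    } runtime_sketch;

    /* ...and the old name survives as an alias, so code such as
     * "if (initialized) { return; }" does not need to change: */
    #define initialized (runtime_sketch.threads.initialized)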
+ static int lib_initialized = 0; + if (!lib_initialized) { + lib_initialized = 1; #if defined(_AIX) && defined(__GNUC__) - extern void pthread_init(void); - pthread_init(); + extern void pthread_init(void); + pthread_init(); #endif + } init_condattr(); } diff --git a/Python/thread_pthread_stubs.h b/Python/thread_pthread_stubs.h index 8b80c0f87e2509..56e5b6141924b4 100644 --- a/Python/thread_pthread_stubs.h +++ b/Python/thread_pthread_stubs.h @@ -124,13 +124,10 @@ pthread_attr_destroy(pthread_attr_t *attr) return 0; } -// pthread_key -typedef struct { - bool in_use; - void *value; -} py_tls_entry; -static py_tls_entry py_tls_entries[PTHREAD_KEYS_MAX] = {0}; +typedef struct py_stub_tls_entry py_tls_entry; + +#define py_tls_entries (_PyRuntime.threads.stubs.tls_entries) int pthread_key_create(pthread_key_t *key, void (*destr_function)(void *)) diff --git a/Python/traceback.c b/Python/traceback.c index de658b9103180e..da26c9b260a3bd 100644 --- a/Python/traceback.c +++ b/Python/traceback.c @@ -52,10 +52,8 @@ tb_create_raw(PyTracebackObject *next, PyFrameObject *frame, int lasti, } tb = PyObject_GC_New(PyTracebackObject, &PyTraceBack_Type); if (tb != NULL) { - Py_XINCREF(next); - tb->tb_next = next; - Py_XINCREF(frame); - tb->tb_frame = frame; + tb->tb_next = (PyTracebackObject*)Py_XNewRef(next); + tb->tb_frame = (PyFrameObject*)Py_XNewRef(frame); tb->tb_lasti = lasti; tb->tb_lineno = lineno; PyObject_GC_Track(tb); @@ -106,8 +104,7 @@ tb_next_get(PyTracebackObject *self, void *Py_UNUSED(_)) if (!ret) { ret = Py_None; } - Py_INCREF(ret); - return ret; + return Py_NewRef(ret); } static int @@ -139,10 +136,7 @@ tb_next_set(PyTracebackObject *self, PyObject *new_next, void *Py_UNUSED(_)) cursor = cursor->tb_next; } - PyObject *old_next = (PyObject*)self->tb_next; - Py_XINCREF(new_next); - self->tb_next = (PyTracebackObject *)new_next; - Py_XDECREF(old_next); + Py_XSETREF(self->tb_next, (PyTracebackObject *)Py_XNewRef(new_next)); return 0; } @@ -522,8 +516,7 @@ display_source_line_with_margin(PyObject *f, PyObject *filename, int lineno, int } if (line) { - Py_INCREF(lineobj); - *line = lineobj; + *line = Py_NewRef(lineobj); } /* remove the indentation of the line */ @@ -538,8 +531,7 @@ display_source_line_with_margin(PyObject *f, PyObject *filename, int lineno, int PyObject *truncated; truncated = PyUnicode_Substring(lineobj, i, PyUnicode_GET_LENGTH(lineobj)); if (truncated) { - Py_DECREF(lineobj); - lineobj = truncated; + Py_SETREF(lineobj, truncated); } else { PyErr_Clear(); } @@ -705,8 +697,13 @@ extract_anchors_from_line(PyObject *filename, PyObject *line, done: if (res > 0) { - *left_anchor += start_offset; - *right_anchor += start_offset; + // Normalize the AST offsets to byte offsets and adjust them with the + // start of the actual line (instead of the source code segment). 
+ assert(segment != NULL); + assert(*left_anchor >= 0); + assert(*right_anchor >= 0); + *left_anchor = _PyPegen_byte_offset_to_character_offset(segment, *left_anchor) + start_offset; + *right_anchor = _PyPegen_byte_offset_to_character_offset(segment, *right_anchor) + start_offset; } Py_XDECREF(segment); if (arena) { @@ -1229,6 +1226,15 @@ dump_traceback(int fd, PyThreadState *tstate, int write_header) if (frame == NULL) { break; } + if (frame->owner == FRAME_OWNED_BY_CSTACK) { + /* Trampoline frame */ + frame = frame->previous; + } + if (frame == NULL) { + break; + } + /* Can't have more than one shim frame in a row */ + assert(frame->owner != FRAME_OWNED_BY_CSTACK); depth++; } } diff --git a/README.rst b/README.rst index 577dccde70beed..ab9d3a6ea71cf6 100644 --- a/README.rst +++ b/README.rst @@ -1,4 +1,4 @@ -This is Python version 3.12.0 alpha 1 +This is Python version 3.12.0 alpha 3 ===================================== .. image:: https://github.com/python/cpython/workflows/Tests/badge.svg diff --git a/Tools/build/deepfreeze.py b/Tools/build/deepfreeze.py index 28ac2b12f92fa7..f6ae2d12e56a17 100644 --- a/Tools/build/deepfreeze.py +++ b/Tools/build/deepfreeze.py @@ -44,6 +44,7 @@ def make_string_literal(b: bytes) -> str: CO_FAST_CELL = 0x40 CO_FAST_FREE = 0x80 +next_code_version = 1 def get_localsplus(code: types.CodeType): a = collections.defaultdict(int) @@ -114,9 +115,8 @@ def __init__(self, file: TextIO) -> None: self.file = file self.cache: Dict[tuple[type, object, str], str] = {} self.hits, self.misses = 0, 0 - self.patchups: list[str] = [] - self.deallocs: list[str] = [] - self.interns: list[str] = [] + self.finis: list[str] = [] + self.inits: list[str] = [] self.write('#include "Python.h"') self.write('#include "internal/pycore_gc.h"') self.write('#include "internal/pycore_code.h"') @@ -228,6 +228,7 @@ def generate_unicode(self, name: str, s: str) -> str: def generate_code(self, name: str, code: types.CodeType) -> str: + global next_code_version # The ordering here matches PyCode_NewWithPosOnlyArgs() # (but see below). 
co_consts = self.generate(name + "_consts", code.co_consts) @@ -257,12 +258,11 @@ def generate_code(self, name: str, code: types.CodeType) -> str: self.write(f".co_names = {co_names},") self.write(f".co_exceptiontable = {co_exceptiontable},") self.field(code, "co_flags") - self.write(".co_warmup = QUICKENING_INITIAL_WARMUP_VALUE,") self.write("._co_linearray_entry_size = 0,") self.field(code, "co_argcount") self.field(code, "co_posonlyargcount") self.field(code, "co_kwonlyargcount") - self.write(f".co_framesize = {code.co_stacksize + len(localsplusnames)} + FRAME_SPECIALS_SIZE,") + self.write(f".co_framesize = {code.co_stacksize + len(localsplusnames) + len(co_consts)} + FRAME_SPECIALS_SIZE,") self.field(code, "co_stacksize") self.field(code, "co_firstlineno") self.write(f".co_nlocalsplus = {len(localsplusnames)},") @@ -270,6 +270,8 @@ def generate_code(self, name: str, code: types.CodeType) -> str: self.write(f".co_nplaincellvars = {nplaincellvars},") self.write(f".co_ncellvars = {ncellvars},") self.write(f".co_nfreevars = {nfreevars},") + self.write(f".co_version = {next_code_version},") + next_code_version += 1 self.write(f".co_localsplusnames = {co_localsplusnames},") self.write(f".co_localspluskinds = {co_localspluskinds},") self.write(f".co_filename = {co_filename},") @@ -284,8 +286,8 @@ def generate_code(self, name: str, code: types.CodeType) -> str: self.write(f"._co_firsttraceable = {i},") break name_as_code = f"(PyCodeObject *)&{name}" - self.deallocs.append(f"_PyStaticCode_Dealloc({name_as_code});") - self.interns.append(f"_PyStaticCode_InternStrings({name_as_code})") + self.finis.append(f"_PyStaticCode_Fini({name_as_code});") + self.inits.append(f"_PyStaticCode_Init({name_as_code})") return f"& {name}.ob_base.ob_base" def generate_tuple(self, name: str, t: Tuple[object, ...]) -> str: @@ -373,11 +375,7 @@ def generate_frozenset(self, name: str, fs: FrozenSet[object]) -> str: def generate_file(self, module: str, code: object)-> None: module = module.replace(".", "_") self.generate(f"{module}_toplevel", code) - with self.block(f"static void {module}_do_patchups(void)"): - for p in self.patchups: - self.write(p) - self.patchups.clear() - self.write(EPILOGUE.replace("%%NAME%%", module)) + self.write(EPILOGUE.format(name=module)) def generate(self, name: str, obj: object) -> str: # Use repr() in the key to distinguish -0.0 from +0.0 @@ -421,11 +419,10 @@ def generate(self, name: str, obj: object) -> str: EPILOGUE = """ PyObject * -_Py_get_%%NAME%%_toplevel(void) -{ - %%NAME%%_do_patchups(); - return Py_NewRef((PyObject *) &%%NAME%%_toplevel); -} +_Py_get_{name}_toplevel(void) +{{ + return Py_NewRef((PyObject *) &{name}_toplevel); +}} """ FROZEN_COMMENT_C = "/* Auto-generated by Programs/_freeze_module.c */" @@ -461,13 +458,14 @@ def generate(args: list[str], output: TextIO) -> None: code = compile(fd.read(), f"<frozen {modname}>", "exec") printer.generate_file(modname, code) with printer.block(f"void\n_Py_Deepfreeze_Fini(void)"): - for p in printer.deallocs: + for p in printer.finis: printer.write(p) with printer.block(f"int\n_Py_Deepfreeze_Init(void)"): - for p in printer.interns: + for p in printer.inits: with printer.block(f"if ({p} < 0)"): printer.write("return -1;") printer.write("return 0;") + printer.write(f"\nuint32_t _Py_next_func_version = {next_code_version};\n") if verbose: print(f"Cache hits: {printer.hits}, misses: {printer.misses}") diff --git a/Tools/build/generate_global_objects.py b/Tools/build/generate_global_objects.py index dd67cfedaddd26..9ceae89878cda8 100644 
--- a/Tools/build/generate_global_objects.py +++ b/Tools/build/generate_global_objects.py @@ -123,6 +123,15 @@ '__rdivmod__', ] +NON_GENERATED_IMMORTAL_OBJECTS = [ + # The generated ones come from generate_runtime_init(). + '(PyObject *)&_Py_SINGLETON(bytes_empty)', + '(PyObject *)&_Py_SINGLETON(tuple_empty)', + '(PyObject *)&_Py_SINGLETON(hamt_bitmap_node_empty)', + '(PyObject *)&_Py_INTERP_SINGLETON(interp, hamt_empty)', + '(PyObject *)&_Py_SINGLETON(context_token_missing)', +] + ####################################### # helpers @@ -287,50 +296,63 @@ def generate_runtime_init(identifiers, strings): printer = Printer(outfile) printer.write(before) printer.write(START) - with printer.block('#define _Py_global_objects_INIT', continuation=True): - with printer.block('.singletons =', ','): - # Global int objects. - with printer.block('.small_ints =', ','): - for i in range(-nsmallnegints, nsmallposints): - printer.write(f'_PyLong_DIGIT_INIT({i}),') - immortal_objects.append(f'(PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + {i}]') - printer.write('') - # Global bytes objects. - printer.write('.bytes_empty = _PyBytes_SIMPLE_INIT(0, 0),') - immortal_objects.append(f'(PyObject *)&_Py_SINGLETON(bytes_empty)') - with printer.block('.bytes_characters =', ','): - for i in range(256): - printer.write(f'_PyBytes_CHAR_INIT({i}),') - immortal_objects.append(f'(PyObject *)&_Py_SINGLETON(bytes_characters)[{i}]') - printer.write('') - # Global strings. - with printer.block('.strings =', ','): - with printer.block('.literals =', ','): - for literal, name in sorted(strings.items(), key=lambda x: x[1]): - printer.write(f'INIT_STR({name}, "{literal}"),') - immortal_objects.append(f'(PyObject *)&_Py_STR({name})') - with printer.block('.identifiers =', ','): - for name in sorted(identifiers): - assert name.isidentifier(), name - printer.write(f'INIT_ID({name}),') - immortal_objects.append(f'(PyObject *)&_Py_ID({name})') - with printer.block('.ascii =', ','): - for i in range(128): - printer.write(f'_PyASCIIObject_INIT("\\x{i:02x}"),') - immortal_objects.append(f'(PyObject *)&_Py_SINGLETON(strings).ascii[{i}]') - with printer.block('.latin1 =', ','): - for i in range(128, 256): - utf8 = ['"'] - for c in chr(i).encode('utf-8'): - utf8.append(f"\\x{c:02x}") - utf8.append('"') - printer.write(f'_PyUnicode_LATIN1_INIT("\\x{i:02x}", {"".join(utf8)}),') - immortal_objects.append(f'(PyObject *)&_Py_SINGLETON(strings).latin1[{i} - 128]') - printer.write('') - with printer.block('.tuple_empty =', ','): - printer.write('.ob_base = _PyVarObject_IMMORTAL_INIT(&PyTuple_Type, 0)') - immortal_objects.append(f'(PyObject *)&_Py_SINGLETON(tuple_empty)') + with printer.block('#define _Py_small_ints_INIT', continuation=True): + for i in range(-nsmallnegints, nsmallposints): + printer.write(f'_PyLong_DIGIT_INIT({i}),') + immortal_objects.append(f'(PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + {i}]') + printer.write('') + with printer.block('#define _Py_bytes_characters_INIT', continuation=True): + for i in range(256): + printer.write(f'_PyBytes_CHAR_INIT({i}),') + immortal_objects.append(f'(PyObject *)&_Py_SINGLETON(bytes_characters)[{i}]') + printer.write('') + with printer.block('#define _Py_str_literals_INIT', continuation=True): + for literal, name in sorted(strings.items(), key=lambda x: x[1]): + printer.write(f'INIT_STR({name}, "{literal}"),') + immortal_objects.append(f'(PyObject *)&_Py_STR({name})') + printer.write('') + with printer.block('#define _Py_str_identifiers_INIT', continuation=True): + for 
name in sorted(identifiers): + assert name.isidentifier(), name + printer.write(f'INIT_ID({name}),') + immortal_objects.append(f'(PyObject *)&_Py_ID({name})') + printer.write('') + with printer.block('#define _Py_str_ascii_INIT', continuation=True): + for i in range(128): + printer.write(f'_PyASCIIObject_INIT("\\x{i:02x}"),') + immortal_objects.append(f'(PyObject *)&_Py_SINGLETON(strings).ascii[{i}]') printer.write('') + with printer.block('#define _Py_str_latin1_INIT', continuation=True): + for i in range(128, 256): + utf8 = ['"'] + for c in chr(i).encode('utf-8'): + utf8.append(f"\\x{c:02x}") + utf8.append('"') + printer.write(f'_PyUnicode_LATIN1_INIT("\\x{i:02x}", {"".join(utf8)}),') + immortal_objects.append(f'(PyObject *)&_Py_SINGLETON(strings).latin1[{i} - 128]') + printer.write(END) + printer.write(after) + return immortal_objects + + +def generate_static_strings_initializer(identifiers, strings): + # Target the runtime initializer. + filename = os.path.join(INTERNAL, 'pycore_unicodeobject_generated.h') + + # Read the non-generated part of the file. + with open(filename) as infile: + orig = infile.read() + lines = iter(orig.rstrip().splitlines()) + before = '\n'.join(iter_to_marker(lines, START)) + for _ in iter_to_marker(lines, END): + pass + after = '\n'.join(lines) + + # Generate the file. + with open_for_changes(filename, orig) as outfile: + printer = Printer(outfile) + printer.write(before) + printer.write(START) printer.write("static inline void") with printer.block("_PyUnicode_InitStaticStrings(void)"): printer.write(f'PyObject *string;') @@ -339,16 +361,41 @@ def generate_runtime_init(identifiers, strings): # since iter_files() ignores .h files. printer.write(f'string = &_Py_ID({i});') printer.write(f'PyUnicode_InternInPlace(&string);') - printer.write('') + # XXX What about "strings"? + printer.write(END) + printer.write(after) + + +def generate_global_object_finalizers(generated_immortal_objects): + # Target the runtime initializer. + filename = os.path.join(INTERNAL, 'pycore_global_objects_fini_generated.h') + + # Read the non-generated part of the file. + with open(filename) as infile: + orig = infile.read() + lines = iter(orig.rstrip().splitlines()) + before = '\n'.join(iter_to_marker(lines, START)) + for _ in iter_to_marker(lines, END): + pass + after = '\n'.join(lines) + + # Generate the file. 
+ with open_for_changes(filename, orig) as outfile: + printer = Printer(outfile) + printer.write(before) + printer.write(START) printer.write('#ifdef Py_DEBUG') printer.write("static inline void") - with printer.block("_PyStaticObjects_CheckRefcnt(void)"): - for i in immortal_objects: - with printer.block(f'if (Py_REFCNT({i}) < _PyObject_IMMORTAL_REFCNT)', ';'): - printer.write(f'_PyObject_Dump({i});') - printer.write(f'Py_FatalError("immortal object has less refcnt than ' - 'expected _PyObject_IMMORTAL_REFCNT");') - printer.write('#endif') + with printer.block( + "_PyStaticObjects_CheckRefcnt(PyInterpreterState *interp)"): + printer.write('/* generated runtime-global */') + printer.write('// (see pycore_runtime_init_generated.h)') + for ref in generated_immortal_objects: + printer.write(f'_PyStaticObject_CheckRefcnt({ref});') + printer.write('/* non-generated */') + for ref in NON_GENERATED_IMMORTAL_OBJECTS: + printer.write(f'_PyStaticObject_CheckRefcnt({ref});') + printer.write('#endif // Py_DEBUG') printer.write(END) printer.write(after) @@ -375,7 +422,9 @@ def main() -> None: identifiers, strings = get_identifiers_and_strings() generate_global_strings(identifiers, strings) - generate_runtime_init(identifiers, strings) + generated_immortal_objects = generate_runtime_init(identifiers, strings) + generate_static_strings_initializer(identifiers, strings) + generate_global_object_finalizers(generated_immortal_objects) if __name__ == '__main__': diff --git a/Tools/build/generate_levenshtein_examples.py b/Tools/build/generate_levenshtein_examples.py index 5a8360fff7315a..778eb458c541c0 100644 --- a/Tools/build/generate_levenshtein_examples.py +++ b/Tools/build/generate_levenshtein_examples.py @@ -1,7 +1,7 @@ """Generate 10,000 unique examples for the Levenshtein short-circuit tests.""" import argparse -from functools import cache +from functools import lru_cache import json import os.path from random import choices, randrange @@ -22,7 +22,7 @@ def _substitution_cost(ch_a, ch_b): return _MOVE_COST -@cache +@lru_cache(None) def levenshtein(a, b): if not a or not b: return (len(a) + len(b)) * _MOVE_COST diff --git a/Tools/build/generate_opcode_h.py b/Tools/build/generate_opcode_h.py index 372221a14d07e1..ec5cd3519325ae 100644 --- a/Tools/build/generate_opcode_h.py +++ b/Tools/build/generate_opcode_h.py @@ -108,7 +108,7 @@ def main(opcode_py, outfile='Include/opcode.h', internaloutfile='Include/interna opname_including_specialized[255] = 'DO_TRACING' used[255] = True - with (open(outfile, 'w') as fobj, open(internaloutfile, 'w') as iobj): + with open(outfile, 'w') as fobj, open(internaloutfile, 'w') as iobj: fobj.write(header) iobj.write(internal_header) @@ -171,6 +171,10 @@ def main(opcode_py, outfile='Include/opcode.h', internaloutfile='Include/interna for i, (op, _) in enumerate(opcode["_nb_ops"]): fobj.write(DEFINE.format(op, i)) + fobj.write("\n") + fobj.write("/* number of codewords for opcode+oparg(s) */\n") + fobj.write("#define OPSIZE 2\n") + iobj.write("\n") iobj.write("#ifdef Py_DEBUG\n") iobj.write(f"static const char *const _PyOpcode_OpName[{NUM_OPCODES}] = {{\n") diff --git a/Tools/build/generate_stdlib_module_names.py b/Tools/build/generate_stdlib_module_names.py index e4f09f88d0a32a..c8a23f41fdd475 100644 --- a/Tools/build/generate_stdlib_module_names.py +++ b/Tools/build/generate_stdlib_module_names.py @@ -29,13 +29,14 @@ '_ctypes_test', '_testbuffer', '_testcapi', + '_testclinic', '_testconsole', '_testimportmultiple', '_testinternalcapi', '_testmultiphase', + 
'_testsinglephase', '_xxsubinterpreters', '_xxtestfuzz', - 'distutils.tests', 'idlelib.idle_test', 'test', 'xxlimited', diff --git a/Tools/c-analyzer/c_parser/_state_machine.py b/Tools/c-analyzer/c_parser/_state_machine.py index 53cbb13e7c4edb..875323188aadfd 100644 --- a/Tools/c-analyzer/c_parser/_state_machine.py +++ b/Tools/c-analyzer/c_parser/_state_machine.py @@ -96,7 +96,7 @@ def parse(srclines): # # end matched parens # ''') -''' +r''' # for loop (?: \s* \b for diff --git a/Tools/c-analyzer/cpython/_parser.py b/Tools/c-analyzer/cpython/_parser.py index 78241f0ea08ac8..769e27432f2837 100644 --- a/Tools/c-analyzer/cpython/_parser.py +++ b/Tools/c-analyzer/cpython/_parser.py @@ -82,6 +82,10 @@ def clean_lines(text): # generated Python/deepfreeze/*.c Python/frozen_modules/*.h +Python/generated_cases.c.h + +# not actually source +Python/bytecodes.c # @end=conf@ ''') @@ -285,6 +289,7 @@ def clean_lines(text): SAME = { _abs('Include/*.h'): [_abs('Include/cpython/')], + _abs('Python/ceval.c'): ['Python/generated_cases.c.h'], } MAX_SIZES = { @@ -311,6 +316,7 @@ def clean_lines(text): _abs('Python/frozen_modules/*.h'): (20_000, 500), _abs('Python/opcode_targets.h'): (10_000, 500), _abs('Python/stdlib_module_names.h'): (5_000, 500), + _abs('Python/importlib.h'): (200_000, 5000), # These large files are currently ignored (see above). _abs('Modules/_ssl_data.h'): (80_000, 10_000), diff --git a/Tools/c-analyzer/cpython/globals-to-fix.tsv b/Tools/c-analyzer/cpython/globals-to-fix.tsv index e327f0a50c688e..479221cbd4b682 100644 --- a/Tools/c-analyzer/cpython/globals-to-fix.tsv +++ b/Tools/c-analyzer/cpython/globals-to-fix.tsv @@ -4,10 +4,10 @@ filename funcname name reason # These are all variables that we will be making non-global. ################################## -# global objects to fix in core code +## global objects to fix in core code -#----------------------- -# exported builtin types (C-API) +##----------------------- +## exported builtin types (C-API) Objects/boolobject.c - PyBool_Type - Objects/bytearrayobject.c - PyByteArrayIter_Type - @@ -102,8 +102,8 @@ Python/context.c - PyContextVar_Type - Python/context.c - PyContext_Type - Python/traceback.c - PyTraceBack_Type - -#----------------------- -# other exported builtin types +##----------------------- +## other exported builtin types # Not in a .h file: Objects/codeobject.c - _PyLineIterator - @@ -126,8 +126,8 @@ Python/hamt.c - _PyHamt_CollisionNode_Type - Python/hamt.c - _PyHamt_Type - Python/symtable.c - PySTEntry_Type - -#----------------------- -# private static builtin types +##----------------------- +## private static builtin types Objects/setobject.c - _PySetDummy_Type - Objects/stringlib/unicode_format.h - PyFormatterIter_Type - @@ -136,8 +136,8 @@ Objects/unicodeobject.c - EncodingMapType - #Objects/unicodeobject.c - PyFieldNameIter_Type - #Objects/unicodeobject.c - PyFormatterIter_Type - -#----------------------- -# static builtin structseq +##----------------------- +## static builtin structseq Objects/floatobject.c - FloatInfoType - Objects/longobject.c - Int_InfoType - @@ -148,8 +148,8 @@ Python/sysmodule.c - Hash_InfoType - Python/sysmodule.c - VersionInfoType - Python/thread.c - ThreadInfoType - -#----------------------- -# builtin exception types +##----------------------- +## builtin exception types Objects/exceptions.c - _PyExc_BaseException - Objects/exceptions.c - _PyExc_BaseExceptionGroup - @@ -286,8 +286,8 @@ Objects/exceptions.c - PyExc_BytesWarning - Objects/exceptions.c - PyExc_ResourceWarning - 
Objects/exceptions.c - PyExc_EncodingWarning - -#----------------------- -# singletons +##----------------------- +## singletons Objects/boolobject.c - _Py_FalseStruct - Objects/boolobject.c - _Py_TrueStruct - @@ -298,193 +298,18 @@ Objects/setobject.c - _dummy_struct - Objects/setobject.c - _PySet_Dummy - Objects/sliceobject.c - _Py_EllipsisObject - -#----------------------- -# cached - initialized once - -# manually cached PyUnicodeObject -Python/ast_unparse.c - _str_replace_inf - - -# holds statically-initialized strings -Objects/typeobject.c - slotdefs - - -# other -Objects/typeobject.c object___reduce_ex___impl objreduce - -Objects/unicodeobject.c - _string_module - -Objects/unicodeobject.c - interned - - -#----------------------- -# other - -# initialized once -Python/context.c - _token_missing - -Python/fileutils.c - _Py_open_cloexec_works - -Python/hamt.c - _empty_bitmap_node - -Python/hamt.c - _empty_hamt - - -# state -Objects/typeobject.c resolve_slotdups pname - -Python/import.c - extensions - - ################################## -# global non-objects to fix in core code - -#----------------------- -# initialized/set once - -# pre-allocated buffer -Modules/getbuildinfo.c Py_GetBuildInfo buildinfo - - -# during init -Objects/unicodeobject.c - bloom_linebreak - -Python/bootstrap_hash.c - _Py_HashSecret_Initialized - -Python/bootstrap_hash.c py_getrandom getrandom_works - -Python/initconfig.c - _Py_global_config_int_max_str_digits - -Python/initconfig.c - Py_DebugFlag - -Python/initconfig.c - Py_UTF8Mode - -Python/initconfig.c - Py_DebugFlag - -Python/initconfig.c - Py_VerboseFlag - -Python/initconfig.c - Py_QuietFlag - -Python/initconfig.c - Py_InteractiveFlag - -Python/initconfig.c - Py_InspectFlag - -Python/initconfig.c - Py_OptimizeFlag - -Python/initconfig.c - Py_NoSiteFlag - -Python/initconfig.c - Py_BytesWarningFlag - -Python/initconfig.c - Py_FrozenFlag - -Python/initconfig.c - Py_IgnoreEnvironmentFlag - -Python/initconfig.c - Py_DontWriteBytecodeFlag - -Python/initconfig.c - Py_NoUserSiteDirectory - -Python/initconfig.c - Py_UnbufferedStdioFlag - -Python/initconfig.c - Py_HashRandomizationFlag - -Python/initconfig.c - Py_IsolatedFlag - -Python/initconfig.c - Py_LegacyWindowsFSEncodingFlag - -Python/initconfig.c - Py_LegacyWindowsStdioFlag - -Python/initconfig.c - orig_argv - -Python/pyhash.c - _Py_HashSecret - -Python/pylifecycle.c - runtime_initialized - -Python/sysmodule.c - _PySys_ImplCacheTag - -Python/sysmodule.c - _PySys_ImplName - -Python/sysmodule.c - _preinit_warnoptions - -Python/sysmodule.c - _preinit_xoptions - -Python/thread.c - initialized - -Python/thread_pthread.h - condattr_monotonic - -Python/thread_pthread.h init_condattr ca - - -# set by embedders during init -Python/initconfig.c - _Py_StandardStreamEncoding - -Python/initconfig.c - _Py_StandardStreamErrors - - -# lazy -Objects/floatobject.c - double_format - -Objects/floatobject.c - float_format - -Objects/longobject.c long_from_non_binary_base log_base_BASE - -Objects/longobject.c long_from_non_binary_base convwidth_base - -Objects/longobject.c long_from_non_binary_base convmultmax_base - -Python/perf_trampoline.c - perf_map_file - -Objects/unicodeobject.c - ucnhash_capi - -Parser/action_helpers.c _PyPegen_dummy_name cache - -Python/dtoa.c - p5s - -Python/fileutils.c - force_ascii - -Python/fileutils.c set_inheritable ioctl_works - -Python/import.c - import_lock - -Python/import.c import_find_and_load header - - -#----------------------- -# unlikely to change after init (or main thread) - -# through 
C-API -Python/frozen.c - PyImport_FrozenModules - -Python/frozen.c - _PyImport_FrozenAliases - -Python/frozen.c - _PyImport_FrozenBootstrap - -Python/frozen.c - _PyImport_FrozenStdlib - -Python/frozen.c - _PyImport_FrozenTest - -Python/import.c - inittab_copy - -Python/import.c - PyImport_Inittab - -Python/preconfig.c - Py_FileSystemDefaultEncoding - -Python/preconfig.c - Py_HasFileSystemDefaultEncoding - -Python/preconfig.c - Py_FileSystemDefaultEncodeErrors - -Python/preconfig.c - _Py_HasFileSystemDefaultEncodeErrors - - -# REPL -Parser/myreadline.c - _PyOS_ReadlineLock - -Parser/myreadline.c - _PyOS_ReadlineTState - -Parser/myreadline.c - PyOS_InputHook - -Parser/myreadline.c - PyOS_ReadlineFunctionPointer - - -# handling C argv -Python/getopt.c - _PyOS_optarg - -Python/getopt.c - _PyOS_opterr - -Python/getopt.c - _PyOS_optind - -Python/getopt.c - opt_ptr - -Python/pathconfig.c - _Py_path_config - - -#----------------------- -# state - -# allocator -Objects/obmalloc.c - _PyObject_Arena - -Objects/obmalloc.c - _Py_tracemalloc_config - -Objects/obmalloc.c - arena_map_bot_count - -Objects/obmalloc.c - arena_map_mid_count - -Objects/obmalloc.c - arena_map_root - -Objects/obmalloc.c - arenas - -Objects/obmalloc.c - maxarenas - -Objects/obmalloc.c - narenas_currently_allocated - -Objects/obmalloc.c - narenas_highwater - -Objects/obmalloc.c - nfp2lasta - -Objects/obmalloc.c - ntimes_arena_allocated - -Objects/obmalloc.c - raw_allocated_blocks - -Objects/obmalloc.c - unused_arena_objects - -Objects/obmalloc.c - usable_arenas - -Objects/obmalloc.c new_arena debug_stats - - -# pre-allocated memory -Python/dtoa.c - freelist - -Python/dtoa.c - private_mem - - -# local buffer -Python/getversion.c Py_GetVersion version - -Python/suggestions.c levenshtein_distance buffer - - -# linked list -Python/dtoa.c - pmem_next - -Python/getargs.c - static_arg_parsers - - -# other -Objects/dictobject.c - _pydict_global_version - -Objects/dictobject.c - next_dict_keys_version - -Objects/funcobject.c - next_func_version - -Objects/moduleobject.c - max_module_number - -Objects/object.c - _Py_RefTotal - -Python/perf_trampoline.c - perf_status - -Python/perf_trampoline.c - extra_code_index - -Python/perf_trampoline.c - code_arena - -Python/perf_trampoline.c - trampoline_api - -Objects/typeobject.c - next_version_tag - -Objects/typeobject.c resolve_slotdups ptrs - -Parser/pegen.c - memo_statistics - -Python/bootstrap_hash.c - urandom_cache - -Python/ceval_gil.c make_pending_calls busy - -Python/ceval.c _PyEval_SetProfile reentrant - -Python/ceval.c _PyEval_SetTrace reentrant - -Python/import.c - import_lock_level - -Python/import.c - import_lock_thread - -Python/import.c import_find_and_load accumulated - -Python/import.c import_find_and_load import_level - -Python/modsupport.c - _Py_PackageContext - -Python/thread_pthread_stubs.h - py_tls_entries - -Python/pyfpe.c - PyFPE_counter - -Python/pylifecycle.c _Py_FatalErrorFormat reentrant - -Python/pylifecycle.c - _Py_UnhandledKeyboardInterrupt - -Python/pylifecycle.c fatal_error reentrant - -Python/specialize.c - _Py_QuickenedCount - +## global non-objects to fix in core code + +# <none> ################################## -# global objects to fix in builtin modules +## global objects to fix in builtin modules -#----------------------- -# static types +##----------------------- +## static types Modules/_collectionsmodule.c - defdict_type - Modules/_collectionsmodule.c - deque_type - @@ -512,6 +337,7 @@ Modules/_testcapi/vectorcall.c - MethodDescriptorNopGet_Type - 
Modules/_testcapi/vectorcall.c - MethodDescriptor2_Type - Modules/itertoolsmodule.c - _grouper_type - Modules/itertoolsmodule.c - accumulate_type - +Modules/itertoolsmodule.c - batched_type - Modules/itertoolsmodule.c - chain_type - Modules/itertoolsmodule.c - combinations_type - Modules/itertoolsmodule.c - compress_type - @@ -532,70 +358,18 @@ Modules/itertoolsmodule.c - tee_type - Modules/itertoolsmodule.c - teedataobject_type - Modules/itertoolsmodule.c - ziplongest_type - -#----------------------- -# other - -# statically initializd pointer to static type -# XXX should be const? -Modules/_io/winconsoleio.c - _PyWindowsConsoleIO_Type - - -# initialized once -Modules/_functoolsmodule.c - kwd_mark - -Modules/_io/_iomodule.c - _PyIO_empty_bytes - -Modules/_testcapi/heaptype.c - _testcapimodule - -Modules/_testcapi/unicode.c - _testcapimodule - -Modules/_tracemalloc.c - tracemalloc_empty_traceback - -Modules/signalmodule.c - DefaultHandler - -Modules/signalmodule.c - IgnoreHandler - -Modules/signalmodule.c - IntHandler - - -# state -Modules/faulthandler.c - fatal_error - -Modules/faulthandler.c - thread - -Modules/faulthandler.c - user_signals - -Modules/faulthandler.c - stack - -Modules/faulthandler.c - old_stack - -Modules/signalmodule.c - Handlers - - ################################## -# global non-objects to fix in builtin modules - -#----------------------- -# initialized once - -Modules/_io/bufferedio.c _PyIO_trap_eintr eintr_int - -Modules/posixmodule.c os_dup2_impl dup3_works - -Modules/posixmodule.c - structseq_new - -Modules/posixmodule.c - ticks_per_second - -Modules/timemodule.c _PyTime_GetClockWithInfo initialized - -Modules/timemodule.c _PyTime_GetProcessTimeWithInfo ticks_per_second - - -#----------------------- -# state - -Modules/_tracemalloc.c - allocators - -Modules/_tracemalloc.c - tables_lock - -Modules/_tracemalloc.c - tracemalloc_traced_memory - -Modules/_tracemalloc.c - tracemalloc_peak_traced_memory - -Modules/_tracemalloc.c - tracemalloc_filenames - -Modules/_tracemalloc.c - tracemalloc_traceback - -Modules/_tracemalloc.c - tracemalloc_tracebacks - -Modules/_tracemalloc.c - tracemalloc_traces - -Modules/_tracemalloc.c - tracemalloc_domains - -Modules/_tracemalloc.c - tracemalloc_reentrant_key - -Modules/faulthandler.c faulthandler_dump_traceback reentrant - -Modules/posixmodule.c - environ - -Modules/signalmodule.c - is_tripped - -Modules/signalmodule.c - signal_global_state - -Modules/signalmodule.c - wakeup - +## global non-objects to fix in builtin modules + +# <none> ################################## -# global objects to fix in extension modules +## global objects to fix in extension modules -#----------------------- -# static types +##----------------------- +## static types Modules/_asynciomodule.c - FutureIterType - Modules/_asynciomodule.c - FutureType - @@ -660,7 +434,6 @@ Modules/_pickle.c - PicklerMemoProxyType - Modules/_pickle.c - Pickler_Type - Modules/_pickle.c - UnpicklerMemoProxyType - Modules/_pickle.c - Unpickler_Type - -Modules/_xxsubinterpretersmodule.c - ChannelIDtype - Modules/_zoneinfo.c - PyZoneInfo_ZoneInfoType - Modules/ossaudiodev.c - OSSAudioType - Modules/ossaudiodev.c - OSSMixerType - @@ -671,10 +444,10 @@ Modules/xxmodule.c - Xxo_Type - Modules/xxsubtype.c - spamdict_type - Modules/xxsubtype.c - spamlist_type - -#----------------------- -# non-static types - initialized once +##----------------------- +## non-static types - initialized once -# heap types +## heap types Modules/_decimal/_decimal.c - DecimalTuple - 
Modules/_decimal/_decimal.c - PyDecSignalDict_Type - Modules/_tkinter.c - PyTclObject_Type - @@ -682,115 +455,26 @@ Modules/_tkinter.c - Tkapp_Type - Modules/_tkinter.c - Tktt_Type - Modules/xxlimited_35.c - Xxo_Type - -# exception types +## exception types Modules/_ctypes/_ctypes.c - PyExc_ArgError - Modules/_cursesmodule.c - PyCursesError - Modules/_decimal/_decimal.c - DecimalException - Modules/_tkinter.c - Tkinter_TclError - -Modules/_xxsubinterpretersmodule.c - ChannelError - -Modules/_xxsubinterpretersmodule.c - ChannelNotFoundError - -Modules/_xxsubinterpretersmodule.c - ChannelClosedError - -Modules/_xxsubinterpretersmodule.c - ChannelEmptyError - -Modules/_xxsubinterpretersmodule.c - ChannelNotEmptyError - -Modules/_xxsubinterpretersmodule.c - RunFailedError - Modules/ossaudiodev.c - OSSAudioError - Modules/socketmodule.c - socket_herror - Modules/socketmodule.c - socket_gaierror - Modules/xxlimited_35.c - ErrorObject - Modules/xxmodule.c - ErrorObject - -#----------------------- -# cached - initialized once - -# _Py_IDENTIFIER (global) -Modules/_asynciomodule.c - PyId___asyncio_running_event_loop__ - -Modules/_asynciomodule.c - PyId__asyncio_future_blocking - -Modules/_asynciomodule.c - PyId_add_done_callback - -Modules/_asynciomodule.c - PyId_call_soon - -Modules/_asynciomodule.c - PyId_cancel - -Modules/_asynciomodule.c - PyId_get_event_loop - -Modules/_asynciomodule.c - PyId_throw - -Modules/_datetimemodule.c - PyId_as_integer_ratio - -Modules/_datetimemodule.c - PyId_fromutc - -Modules/_datetimemodule.c - PyId_isoformat - -Modules/_datetimemodule.c - PyId_strftime - - -# _Py_IDENTIFIER (local) -Modules/_asynciomodule.c FutureObj_finalize PyId_call_exception_handler - -Modules/_asynciomodule.c FutureObj_finalize PyId_exception - -Modules/_asynciomodule.c FutureObj_finalize PyId_future - -Modules/_asynciomodule.c FutureObj_finalize PyId_message - -Modules/_asynciomodule.c FutureObj_finalize PyId_source_traceback - -Modules/_asynciomodule.c FutureObj_get_state PyId_CANCELLED - -Modules/_asynciomodule.c FutureObj_get_state PyId_FINISHED - -Modules/_asynciomodule.c FutureObj_get_state PyId_PENDING - -Modules/_asynciomodule.c TaskObj_finalize PyId_call_exception_handler - -Modules/_asynciomodule.c TaskObj_finalize PyId_message - -Modules/_asynciomodule.c TaskObj_finalize PyId_source_traceback - -Modules/_asynciomodule.c TaskObj_finalize PyId_task - -Modules/_asynciomodule.c future_init PyId_get_debug - -Modules/_asynciomodule.c get_future_loop PyId__loop - -Modules/_asynciomodule.c get_future_loop PyId_get_loop - -Modules/_asynciomodule.c register_task PyId_add - -Modules/_asynciomodule.c unregister_task PyId_discard - -Modules/_ctypes/_ctypes.c CDataType_from_param PyId__as_parameter_ - -Modules/_ctypes/_ctypes.c PyCArrayType_new PyId__length_ - -Modules/_ctypes/_ctypes.c PyCArrayType_new PyId__type_ - -Modules/_ctypes/_ctypes.c PyCFuncPtr_set_restype PyId__check_retval_ - -Modules/_ctypes/_ctypes.c PyCPointerType_new PyId__type_ - -Modules/_ctypes/_ctypes.c PyCPointerType_set_type PyId__type_ - -Modules/_ctypes/_ctypes.c PyCSimpleType_from_param PyId__as_parameter_ - -Modules/_ctypes/_ctypes.c PyCSimpleType_new PyId__type_ - -Modules/_ctypes/_ctypes.c StructUnionType_new PyId__abstract_ - -Modules/_ctypes/_ctypes.c StructUnionType_new PyId__fields_ - -Modules/_ctypes/_ctypes.c _build_result PyId___ctypes_from_outparam__ - -Modules/_ctypes/_ctypes.c _init_pos_args PyId__fields_ - -Modules/_ctypes/_ctypes.c c_char_p_from_param PyId__as_parameter_ - -Modules/_ctypes/_ctypes.c 
c_void_p_from_param PyId__as_parameter_ - -Modules/_ctypes/_ctypes.c c_wchar_p_from_param PyId__as_parameter_ - -Modules/_ctypes/_ctypes.c converters_from_argtypes PyId_from_param - -Modules/_ctypes/_ctypes.c make_funcptrtype_dict PyId__argtypes_ - -Modules/_ctypes/_ctypes.c make_funcptrtype_dict PyId__check_retval_ - -Modules/_ctypes/_ctypes.c make_funcptrtype_dict PyId__flags_ - -Modules/_ctypes/_ctypes.c make_funcptrtype_dict PyId__restype_ - -Modules/_ctypes/callproc.c ConvParam PyId__as_parameter_ - -Modules/_ctypes/callproc.c unpickle PyId___new__ - -Modules/_ctypes/callproc.c unpickle PyId___setstate__ - -Modules/_ctypes/stgdict.c MakeAnonFields PyId__anonymous_ - -Modules/_ctypes/stgdict.c PyCStructUnionType_update_stgdict PyId__pack_ - -Modules/_ctypes/stgdict.c PyCStructUnionType_update_stgdict PyId__swappedbytes_ - -Modules/_ctypes/stgdict.c PyCStructUnionType_update_stgdict PyId__use_broken_old_ctypes_structure_semantics_ - -Modules/_cursesmodule.c _curses_getwin PyId_read - -Modules/_cursesmodule.c _curses_window_putwin PyId_write - -Modules/_cursesmodule.c update_lines_cols PyId_COLS - -Modules/_cursesmodule.c update_lines_cols PyId_LINES - -Modules/_datetimemodule.c call_tzname PyId_tzname - -Modules/_datetimemodule.c date_strftime PyId_timetuple - -Modules/_datetimemodule.c date_today PyId_fromtimestamp - -Modules/_datetimemodule.c datetime_strptime PyId__strptime_datetime - -Modules/_datetimemodule.c make_Zreplacement PyId_replace - -Modules/_datetimemodule.c tzinfo_reduce PyId___getinitargs__ - -Modules/_elementtree.c _elementtree_Element_find_impl PyId_find - -Modules/_elementtree.c _elementtree_Element_findall_impl PyId_findall - -Modules/_elementtree.c _elementtree_Element_findtext_impl PyId_findtext - -Modules/_elementtree.c _elementtree_Element_iterfind_impl PyId_iterfind - -Modules/_elementtree.c expat_start_doctype_handler PyId_doctype - -Modules/_elementtree.c treebuilder_add_subelement PyId_append - -Modules/_elementtree.c treebuilder_flush_data PyId_tail - -Modules/_elementtree.c treebuilder_flush_data PyId_text - -Modules/_json.c _encoded_const PyId_false - -Modules/_json.c _encoded_const PyId_null - -Modules/_json.c _encoded_const PyId_true - -Modules/_json.c raise_errmsg PyId_JSONDecodeError - -Modules/_json.c raise_errmsg PyId_decoder - -Modules/ossaudiodev.c oss_exit PyId_close - - -# manually cached PyUnicodeOjbect +##----------------------- +## cached - initialized once + +## manually cached PyUnicodeOjbect Modules/_asynciomodule.c - context_kwname - Modules/_ctypes/callproc.c _ctypes_get_errobj error_object_name - Modules/_ctypes/_ctypes.c CreateSwappedType suffix - -# other - during module init +## other - during module init Modules/_asynciomodule.c - asyncio_mod - Modules/_asynciomodule.c - traceback_extract_stack - Modules/_asynciomodule.c - asyncio_future_repr_func - @@ -805,10 +489,10 @@ Modules/_zoneinfo.c - io_open - Modules/_zoneinfo.c - _tzpath_find_tzfile - Modules/_zoneinfo.c - _common_mod - -#----------------------- -# other +##----------------------- +## other -# initialized once +## initialized once Modules/_ctypes/_ctypes.c - _unpickle - Modules/_ctypes/_ctypes.c PyCArrayType_from_ctype cache - Modules/_cursesmodule.c - ModDict - @@ -831,7 +515,7 @@ Modules/_decimal/_decimal.c - Rational - Modules/_decimal/_decimal.c - SignalTuple - Modules/arraymodule.c array_array___reduce_ex___impl array_reconstructor - -# state +## state Modules/_asynciomodule.c - cached_running_holder - Modules/_asynciomodule.c - fi_freelist - Modules/_asynciomodule.c 
- fi_freelist_len - @@ -846,20 +530,19 @@ Modules/_tkinter.c - valInCmd - Modules/_tkinter.c - trbInCmd - Modules/_zoneinfo.c - TIMEDELTA_CACHE - Modules/_zoneinfo.c - ZONEINFO_WEAK_CACHE - -Modules/syslogmodule.c - S_ident_o - ################################## -# global non-objects to fix in extension modules +## global non-objects to fix in extension modules -#----------------------- -# initialized once +##----------------------- +## initialized once -# pre-allocated buffer +## pre-allocated buffer Modules/nismodule.c nisproc_maplist_2 res - Modules/pyexpat.c PyUnknownEncodingHandler template_buffer - -# other +## other Include/datetime.h - PyDateTimeAPI - Modules/_asynciomodule.c - module_initialized - Modules/_ctypes/cfield.c _ctypes_get_fielddesc initialized - @@ -904,8 +587,8 @@ Modules/readline.c - libedit_history_start - Modules/socketmodule.c - accept4_works - Modules/socketmodule.c - sock_cloexec_works - -#----------------------- -# state +##----------------------- +## state Modules/_asynciomodule.c - cached_running_holder_tsid - Modules/_asynciomodule.c - task_name_counter - @@ -935,4 +618,3 @@ Modules/readline.c - completed_input_string - Modules/rotatingtree.c - random_stream - Modules/rotatingtree.c - random_value - Modules/socketmodule.c - defaulttimeout - -Modules/syslogmodule.c - S_log_open - diff --git a/Tools/c-analyzer/cpython/ignored.tsv b/Tools/c-analyzer/cpython/ignored.tsv index dbfb0e01e7f753..c71fc0d958216c 100644 --- a/Tools/c-analyzer/cpython/ignored.tsv +++ b/Tools/c-analyzer/cpython/ignored.tsv @@ -1,337 +1,177 @@ filename funcname name reason #??? - somevar ??? +# All globals here are technically mutable but known to be safe. + + ################################## -# mutable but known to be safe +## process-global values - set once -Python/pylifecycle.c - _PyRuntime - +# These will never re-initialize (but would be idempotent). +# These are effectively const. -# All uses of _PyArg_Parser are handled in c-analyzr/cpython/_analyzer.py. +##----------------------- +## process-global resources -#----------------------- -# others +## indicators for resource availability/capability +# (set during first init) +Python/bootstrap_hash.c py_getrandom getrandom_works - +Python/fileutils.c - _Py_open_cloexec_works - +Python/fileutils.c set_inheritable ioctl_works - +# (set lazily, *after* first init) +# XXX Is this thread-safe? +Modules/posixmodule.c os_dup2_impl dup3_works - + +## guards around resource init +Python/thread_pthread.h PyThread__init_thread lib_initialized - + +##----------------------- +## other values (not Python-specific) + +## cached computed data - set lazily (*after* first init) +# XXX Are these safe relative to write races? +Objects/longobject.c long_from_non_binary_base log_base_BASE - +Objects/longobject.c long_from_non_binary_base convwidth_base - +Objects/longobject.c long_from_non_binary_base convmultmax_base - +Objects/unicodeobject.c - bloom_linebreak - +# This is safe: +Objects/unicodeobject.c _init_global_state initialized - + +##----------------------- +## other values (Python-specific) + +## internal state - set before/during first init +Modules/getbuildinfo.c - buildinfo - +Modules/getbuildinfo.c - initialized - +Python/getversion.c - initialized - +Python/getversion.c - version - + +## public C-API - set during first init +Python/bootstrap_hash.c - _Py_HashSecret_Initialized - +Python/pyhash.c - _Py_HashSecret - + +## internal state - set lazily (*after* first init) +# XXX Move to _PyRuntimeState (i.e. tie to init/fini cycle)? 
+Parser/action_helpers.c _PyPegen_dummy_name cache - -# XXX The analyzer should have ignored these. -Modules/_io/_iomodule.c - _PyIO_Module - -Modules/_sqlite/module.c - _sqlite3module - ################################## -# forward/extern references -# XXX The analyzer should have ignored these. +## state tied to Py_Main() +# (only in main thread) -Include/py_curses.h - PyCurses_API - -Include/pydecimal.h - _decimal_api - -Modules/_blake2/blake2module.c - blake2b_type_spec - -Modules/_blake2/blake2module.c - blake2s_type_spec - -Modules/_io/fileio.c - _Py_open_cloexec_works - -Modules/_io/_iomodule.h - PyIOBase_Type - -Modules/_io/_iomodule.h - PyRawIOBase_Type - -Modules/_io/_iomodule.h - PyBufferedIOBase_Type - -Modules/_io/_iomodule.h - PyTextIOBase_Type - -Modules/_io/_iomodule.h - PyFileIO_Type - -Modules/_io/_iomodule.h - PyBytesIO_Type - -Modules/_io/_iomodule.h - PyStringIO_Type - -Modules/_io/_iomodule.h - PyBufferedReader_Type - -Modules/_io/_iomodule.h - PyBufferedWriter_Type - -Modules/_io/_iomodule.h - PyBufferedRWPair_Type - -Modules/_io/_iomodule.h - PyBufferedRandom_Type - -Modules/_io/_iomodule.h - PyTextIOWrapper_Type - -Modules/_io/_iomodule.h - PyIncrementalNewlineDecoder_Type - -Modules/_io/_iomodule.h - _PyBytesIOBuffer_Type - -Modules/_io/_iomodule.h - _PyIO_Module - -Modules/_io/_iomodule.h - _PyIO_str_close - -Modules/_io/_iomodule.h - _PyIO_str_closed - -Modules/_io/_iomodule.h - _PyIO_str_decode - -Modules/_io/_iomodule.h - _PyIO_str_encode - -Modules/_io/_iomodule.h - _PyIO_str_fileno - -Modules/_io/_iomodule.h - _PyIO_str_flush - -Modules/_io/_iomodule.h - _PyIO_str_getstate - -Modules/_io/_iomodule.h - _PyIO_str_isatty - -Modules/_io/_iomodule.h - _PyIO_str_newlines - -Modules/_io/_iomodule.h - _PyIO_str_nl - -Modules/_io/_iomodule.h - _PyIO_str_peek - -Modules/_io/_iomodule.h - _PyIO_str_read - -Modules/_io/_iomodule.h - _PyIO_str_read1 - -Modules/_io/_iomodule.h - _PyIO_str_readable - -Modules/_io/_iomodule.h - _PyIO_str_readall - -Modules/_io/_iomodule.h - _PyIO_str_readinto - -Modules/_io/_iomodule.h - _PyIO_str_readline - -Modules/_io/_iomodule.h - _PyIO_str_reset - -Modules/_io/_iomodule.h - _PyIO_str_seek - -Modules/_io/_iomodule.h - _PyIO_str_seekable - -Modules/_io/_iomodule.h - _PyIO_str_setstate - -Modules/_io/_iomodule.h - _PyIO_str_tell - -Modules/_io/_iomodule.h - _PyIO_str_truncate - -Modules/_io/_iomodule.h - _PyIO_str_writable - -Modules/_io/_iomodule.h - _PyIO_str_write - -Modules/_io/_iomodule.h - _PyIO_empty_str - -Modules/_io/_iomodule.h - _PyIO_empty_bytes - -Modules/_multiprocessing/multiprocessing.h - _PyMp_SemLockType - -Modules/_sqlite/module.c - _pysqlite_converters - -Modules/_sqlite/module.c - _pysqlite_enable_callback_tracebacks - -Modules/_sqlite/module.c - pysqlite_BaseTypeAdapted - -Modules/_sqlite/module.h - pysqlite_global_state - -Modules/_testcapimodule.c - _PyBytesIOBuffer_Type - -Modules/posixmodule.c - _Py_open_cloexec_works - -Objects/object.c - _Py_GenericAliasIterType - -Objects/object.c - _PyMemoryIter_Type - -Objects/object.c - _PyLineIterator - -Objects/object.c - _PyPositionsIterator - -Python/perf_trampoline.c - _Py_trampoline_func_start - -Python/perf_trampoline.c - _Py_trampoline_func_end - -Python/importdl.h - _PyImport_DynLoadFiletab - +##----------------------- +## handling C argv -Modules/expat/xmlrole.c - prolog0 - -Modules/expat/xmlrole.c - prolog1 - -Modules/expat/xmlrole.c - prolog2 - -Modules/expat/xmlrole.c - doctype0 - -Modules/expat/xmlrole.c - doctype1 - -Modules/expat/xmlrole.c - doctype2 - 
-Modules/expat/xmlrole.c - doctype3 - -Modules/expat/xmlrole.c - doctype4 - -Modules/expat/xmlrole.c - doctype5 - -Modules/expat/xmlrole.c - internalSubset - -Modules/expat/xmlrole.c - entity0 - -Modules/expat/xmlrole.c - entity1 - -Modules/expat/xmlrole.c - entity2 - -Modules/expat/xmlrole.c - entity3 - -Modules/expat/xmlrole.c - entity4 - -Modules/expat/xmlrole.c - entity5 - -Modules/expat/xmlrole.c - entity6 - -Modules/expat/xmlrole.c - entity7 - -Modules/expat/xmlrole.c - entity8 - -Modules/expat/xmlrole.c - entity9 - -Modules/expat/xmlrole.c - entity10 - -Modules/expat/xmlrole.c - notation0 - -Modules/expat/xmlrole.c - notation1 - -Modules/expat/xmlrole.c - notation2 - -Modules/expat/xmlrole.c - notation3 - -Modules/expat/xmlrole.c - notation4 - -Modules/expat/xmlrole.c - attlist0 - -Modules/expat/xmlrole.c - attlist1 - -Modules/expat/xmlrole.c - attlist2 - -Modules/expat/xmlrole.c - attlist3 - -Modules/expat/xmlrole.c - attlist4 - -Modules/expat/xmlrole.c - attlist5 - -Modules/expat/xmlrole.c - attlist6 - -Modules/expat/xmlrole.c - attlist7 - -Modules/expat/xmlrole.c - attlist8 - -Modules/expat/xmlrole.c - attlist9 - -Modules/expat/xmlrole.c - element0 - -Modules/expat/xmlrole.c - element1 - -Modules/expat/xmlrole.c - element2 - -Modules/expat/xmlrole.c - element3 - -Modules/expat/xmlrole.c - element4 - -Modules/expat/xmlrole.c - element5 - -Modules/expat/xmlrole.c - element6 - -Modules/expat/xmlrole.c - element7 - -Modules/expat/xmlrole.c - externalSubset0 - -Modules/expat/xmlrole.c - externalSubset1 - -Modules/expat/xmlrole.c - condSect0 - -Modules/expat/xmlrole.c - condSect1 - -Modules/expat/xmlrole.c - condSect2 - -Modules/expat/xmlrole.c - declClose - -Modules/expat/xmlrole.c - error - +Python/getopt.c - _PyOS_optarg - +Python/getopt.c - _PyOS_opterr - +Python/getopt.c - _PyOS_optind - +Python/getopt.c - opt_ptr - +Python/pathconfig.c - _Py_path_config - + +##----------------------- +## REPL + +Parser/myreadline.c - _PyOS_ReadlineLock - +Parser/myreadline.c - _PyOS_ReadlineTState - +Parser/myreadline.c - PyOS_InputHook - +Parser/myreadline.c - PyOS_ReadlineFunctionPointer - ################################## -# test code +## runtime-global values - set once with each init -Modules/_ctypes/_ctypes_test.c - _ctypes_test_slots - -Modules/_ctypes/_ctypes_test.c - _ctypes_testmodule - -Modules/_ctypes/_ctypes_test.c - _xxx_lib - -Modules/_ctypes/_ctypes_test.c - an_integer - -Modules/_ctypes/_ctypes_test.c - bottom - -Modules/_ctypes/_ctypes_test.c - last_tf_arg_s - -Modules/_ctypes/_ctypes_test.c - last_tf_arg_u - -Modules/_ctypes/_ctypes_test.c - last_tfrsuv_arg - -Modules/_ctypes/_ctypes_test.c - left - -Modules/_ctypes/_ctypes_test.c - module_methods - -Modules/_ctypes/_ctypes_test.c - my_eggs - -Modules/_ctypes/_ctypes_test.c - my_spams - -Modules/_ctypes/_ctypes_test.c - right - -Modules/_ctypes/_ctypes_test.c - top - -Modules/_testbuffer.c - NDArray_Type - -Modules/_testbuffer.c - StaticArray_Type - -Modules/_testbuffer.c - Struct - -Modules/_testbuffer.c - _testbuffer_functions - -Modules/_testbuffer.c - _testbuffermodule - -Modules/_testbuffer.c - calcsize - -Modules/_testbuffer.c - infobuf - -Modules/_testbuffer.c - ndarray_as_buffer - -Modules/_testbuffer.c - ndarray_as_mapping - -Modules/_testbuffer.c - ndarray_as_sequence - -Modules/_testbuffer.c - ndarray_getset - -Modules/_testbuffer.c - ndarray_methods - -Modules/_testbuffer.c - simple_fmt - -Modules/_testbuffer.c - simple_format - -Modules/_testbuffer.c - static_buffer - -Modules/_testbuffer.c - static_mem - 
-Modules/_testbuffer.c - static_shape - -Modules/_testbuffer.c - static_strides - -Modules/_testbuffer.c - staticarray_as_buffer - -Modules/_testbuffer.c - structmodule - -Modules/_testbuffer.c ndarray_init kwlist - -Modules/_testbuffer.c ndarray_memoryview_from_buffer format - -Modules/_testbuffer.c ndarray_memoryview_from_buffer info - -Modules/_testbuffer.c ndarray_memoryview_from_buffer shape - -Modules/_testbuffer.c ndarray_memoryview_from_buffer strides - -Modules/_testbuffer.c ndarray_memoryview_from_buffer suboffsets - -Modules/_testbuffer.c ndarray_push kwlist - -Modules/_testbuffer.c staticarray_init kwlist - -Modules/_testcapimodule.c - ContainerNoGC_members - -Modules/_testcapimodule.c - ContainerNoGC_type - -Modules/_testcapimodule.c - FmData - -Modules/_testcapimodule.c - FmHook - -Modules/_testcapimodule.c - GenericAlias_Type - -Modules/_testcapimodule.c - Generic_Type - -Modules/_testcapimodule.c - HeapCTypeSetattr_slots - -Modules/_testcapimodule.c - HeapCTypeSetattr_spec - -Modules/_testcapimodule.c - HeapCTypeSubclassWithFinalizer_slots - -Modules/_testcapimodule.c - HeapCTypeSubclassWithFinalizer_spec - -Modules/_testcapimodule.c - HeapCTypeSubclass_slots - -Modules/_testcapimodule.c - HeapCTypeSubclass_spec - -Modules/_testcapimodule.c - HeapCTypeWithBuffer_slots - -Modules/_testcapimodule.c - HeapCTypeWithBuffer_spec - -Modules/_testcapimodule.c - HeapCTypeWithDict_slots - -Modules/_testcapimodule.c - HeapCTypeWithDict_spec - -Modules/_testcapimodule.c - HeapCTypeWithNegativeDict_slots - -Modules/_testcapimodule.c - HeapCTypeWithNegativeDict_spec - -Modules/_testcapimodule.c - HeapCTypeWithWeakref_slots - -Modules/_testcapimodule.c - HeapCTypeWithWeakref_spec - -Modules/_testcapimodule.c - HeapCType_slots - -Modules/_testcapimodule.c - HeapCType_spec - -Modules/_testcapimodule.c - HeapDocCType_slots - -Modules/_testcapimodule.c - HeapDocCType_spec - -Modules/_testcapimodule.c - HeapGcCType_slots - -Modules/_testcapimodule.c - HeapGcCType_spec - -Modules/_testcapimodule.c - MethClass_Type - -Modules/_testcapimodule.c - MethInstance_Type - -Modules/_testcapimodule.c - MethStatic_Type - -Modules/_testcapimodule.c - MethodDescriptor2_Type - -Modules/_testcapimodule.c - MethodDescriptorBase_Type - -Modules/_testcapimodule.c - MethodDescriptorDerived_Type - -Modules/_testcapimodule.c - MethodDescriptorNopGet_Type - -Modules/_testcapimodule.c - MyList_Type - -Modules/_testcapimodule.c - PyRecursingInfinitelyError_Type - -Modules/_testcapimodule.c - TestError - -Modules/_testcapimodule.c - TestMethods - -Modules/_testcapimodule.c - _HashInheritanceTester_Type - -Modules/_testcapimodule.c - _testcapimodule - -Modules/_testcapimodule.c - awaitType - -Modules/_testcapimodule.c - awaitType_as_async - -Modules/_testcapimodule.c - capsule_context - -Modules/_testcapimodule.c - capsule_destructor_call_count - -Modules/_testcapimodule.c - capsule_error - -Modules/_testcapimodule.c - capsule_name - -Modules/_testcapimodule.c - capsule_pointer - -Modules/_testcapimodule.c - decimal_initialized - -Modules/_testcapimodule.c - generic_alias_methods - -Modules/_testcapimodule.c - generic_methods - -Modules/_testcapimodule.c - heapctype_members - -Modules/_testcapimodule.c - heapctypesetattr_members - -Modules/_testcapimodule.c - heapctypesubclass_members - -Modules/_testcapimodule.c - heapctypewithdict_getsetlist - -Modules/_testcapimodule.c - heapctypewithdict_members - -Modules/_testcapimodule.c - heapctypewithnegativedict_members - -Modules/_testcapimodule.c - 
heapctypewithweakref_members - -Modules/_testcapimodule.c - ipowType - -Modules/_testcapimodule.c - ipowType_as_number - -Modules/_testcapimodule.c - matmulType - -Modules/_testcapimodule.c - matmulType_as_number - -Modules/_testcapimodule.c - meth_class_methods - -Modules/_testcapimodule.c - meth_instance_methods - -Modules/_testcapimodule.c - meth_static_methods - -Modules/_testcapimodule.c - ml - -Modules/_testcapimodule.c - str1 - -Modules/_testcapimodule.c - str2 - -Modules/_testcapimodule.c - test_members - -Modules/_testcapimodule.c - test_run_counter - -Modules/_testcapimodule.c - test_structmembersType - -Modules/_testcapimodule.c - thread_done - -Modules/_testcapimodule.c - x - -Modules/_testcapimodule.c getargs_keyword_only keywords - -Modules/_testcapimodule.c getargs_keywords keywords - -Modules/_testcapimodule.c getargs_positional_only_and_keywords keywords - -Modules/_testcapimodule.c getargs_s_hash_int2 keywords static char*[] -Modules/_testcapimodule.c make_exception_with_doc kwlist - -Modules/_testcapimodule.c raise_SIGINT_then_send_None PyId_send - -Modules/_testcapimodule.c slot_tp_del PyId___tp_del__ - -Modules/_testcapimodule.c test_capsule buffer - -Modules/_testcapimodule.c test_empty_argparse kwlist - -Modules/_testcapimodule.c test_structmembers_new keywords - -Modules/_testcapimodule.c getargs_s_hash_int keywords - -Modules/_testimportmultiple.c - _barmodule - -Modules/_testimportmultiple.c - _foomodule - -Modules/_testimportmultiple.c - _testimportmultiple - -Modules/_testinternalcapi.c - TestMethods - -Modules/_testinternalcapi.c - _testcapimodule - -Modules/_testmultiphase.c - Example_Type_slots - -Modules/_testmultiphase.c - Example_Type_spec - -Modules/_testmultiphase.c - Example_methods - -Modules/_testmultiphase.c - StateAccessType_Type_slots - -Modules/_testmultiphase.c - StateAccessType_methods - -Modules/_testmultiphase.c - StateAccessType_spec - -Modules/_testmultiphase.c - Str_Type_slots - -Modules/_testmultiphase.c - Str_Type_spec - -Modules/_testmultiphase.c - def_bad_large - -Modules/_testmultiphase.c - def_bad_negative - -Modules/_testmultiphase.c - def_create_int_with_state - -Modules/_testmultiphase.c - def_create_null - -Modules/_testmultiphase.c - def_create_raise - -Modules/_testmultiphase.c - def_create_unreported_exception - -Modules/_testmultiphase.c - def_exec_err - -Modules/_testmultiphase.c - def_exec_raise - -Modules/_testmultiphase.c - def_exec_unreported_exception - -Modules/_testmultiphase.c - def_meth_state_access - -Modules/_testmultiphase.c - def_negative_size - -Modules/_testmultiphase.c - def_nonascii_kana - -Modules/_testmultiphase.c - def_nonascii_latin - -Modules/_testmultiphase.c - def_nonmodule - -Modules/_testmultiphase.c - def_nonmodule_with_exec_slots - -Modules/_testmultiphase.c - def_nonmodule_with_methods - -Modules/_testmultiphase.c - imp_dummy_def - -Modules/_testmultiphase.c - main_def - -Modules/_testmultiphase.c - main_slots - -Modules/_testmultiphase.c - meth_state_access_slots - -Modules/_testmultiphase.c - nonmodule_methods - -Modules/_testmultiphase.c - null_slots_def - -Modules/_testmultiphase.c - slots_bad_large - -Modules/_testmultiphase.c - slots_bad_negative - -Modules/_testmultiphase.c - slots_create_nonmodule - -Modules/_testmultiphase.c - slots_create_nonmodule - -Modules/_testmultiphase.c - slots_create_null - -Modules/_testmultiphase.c - slots_create_raise - -Modules/_testmultiphase.c - slots_create_unreported_exception - -Modules/_testmultiphase.c - slots_exec_err - -Modules/_testmultiphase.c - 
slots_exec_raise - -Modules/_testmultiphase.c - slots_exec_unreported_exception - -Modules/_testmultiphase.c - slots_nonmodule_with_exec_slots - -Modules/_testmultiphase.c - testexport_methods - -Modules/_testmultiphase.c - uninitialized_def - -Modules/_xxtestfuzz/_xxtestfuzz.c - _fuzzmodule - -Modules/_xxtestfuzz/_xxtestfuzz.c - module_methods - -Modules/_xxtestfuzz/fuzzer.c - SRE_FLAG_DEBUG - -Modules/_xxtestfuzz/fuzzer.c - ast_literal_eval_method - -Modules/_xxtestfuzz/fuzzer.c - compiled_patterns - -Modules/_xxtestfuzz/fuzzer.c - csv_error - -Modules/_xxtestfuzz/fuzzer.c - csv_module - -Modules/_xxtestfuzz/fuzzer.c - json_loads_method - -Modules/_xxtestfuzz/fuzzer.c - regex_patterns - -Modules/_xxtestfuzz/fuzzer.c - sre_compile_method - -Modules/_xxtestfuzz/fuzzer.c - sre_error_exception - -Modules/_xxtestfuzz/fuzzer.c - struct_error - -Modules/_xxtestfuzz/fuzzer.c - struct_unpack_method - -Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput CSV_READER_INITIALIZED - -Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput JSON_LOADS_INITIALIZED - -Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput SRE_COMPILE_INITIALIZED - -Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput SRE_MATCH_INITIALIZED - -Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput STRUCT_UNPACK_INITIALIZED - -Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput AST_LITERAL_EVAL_INITIALIZED - +# These are effectively const. + +##----------------------- +## set by embedders before init +# (whether directly or through a call) + +Python/initconfig.c - _Py_StandardStreamEncoding - +Python/initconfig.c - _Py_StandardStreamErrors - + +##----------------------- +## public C-API + +## deprecated +Python/preconfig.c - Py_FileSystemDefaultEncoding - +Python/preconfig.c - Py_HasFileSystemDefaultEncoding - +Python/preconfig.c - Py_FileSystemDefaultEncodeErrors - +Python/preconfig.c - _Py_HasFileSystemDefaultEncodeErrors - + +## legacy config flags +Python/initconfig.c - Py_UTF8Mode - +Python/initconfig.c - Py_DebugFlag - +Python/initconfig.c - Py_VerboseFlag - +Python/initconfig.c - Py_QuietFlag - +Python/initconfig.c - Py_InteractiveFlag - +Python/initconfig.c - Py_InspectFlag - +Python/initconfig.c - Py_OptimizeFlag - +Python/initconfig.c - Py_NoSiteFlag - +Python/initconfig.c - Py_BytesWarningFlag - +Python/initconfig.c - Py_FrozenFlag - +Python/initconfig.c - Py_IgnoreEnvironmentFlag - +Python/initconfig.c - Py_DontWriteBytecodeFlag - +Python/initconfig.c - Py_NoUserSiteDirectory - +Python/initconfig.c - Py_UnbufferedStdioFlag - +Python/initconfig.c - Py_HashRandomizationFlag - +Python/initconfig.c - Py_IsolatedFlag - +Python/initconfig.c - Py_LegacyWindowsFSEncodingFlag - +Python/initconfig.c - Py_LegacyWindowsStdioFlag - + +##----------------------- +## initialized statically, may be customized by embedders + +Python/frozen.c - PyImport_FrozenModules - +Python/import.c - inittab_copy - +Python/import.c - PyImport_Inittab - ################################## -# should be const +## runtime-global state + +##----------------------- +## tied to each init/fini cycle + +## the consolidated runtime state +Python/pylifecycle.c - _PyRuntime - +Python/pylifecycle.c - runtime_initialized - + +# All cases of _PyArg_Parser are handled in c-analyzr/cpython/_analyzer.py. 
+ +## main interp state in stdlib modules +Modules/syslogmodule.c - S_ident_o - +Modules/syslogmodule.c - S_log_open - + +##----------------------- +## kept for stable ABI compatibility + +# XXX should be per-interpreter, without impacting stable ABI extensions +Objects/object.c - _Py_RefTotal - + +##----------------------- +## one-off temporary state + +# used during runtime init +Python/sysmodule.c - _preinit_warnoptions - +Python/sysmodule.c - _preinit_xoptions - + +# thread-safety +# XXX need race protection? +Modules/faulthandler.c faulthandler_dump_traceback reentrant - +Python/pylifecycle.c _Py_FatalErrorFormat reentrant - +Python/pylifecycle.c fatal_error reentrant - + + +################################## +## not significant + +##----------------------- +## not used (kept for compatibility) + +Python/pyfpe.c - PyFPE_counter - + +##----------------------- +## should be const # XXX Make them const. # These are all variables that we will be leaving global. @@ -339,7 +179,6 @@ Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput AST_LITERAL_EVAL_INITIALIZED # All module defs, type defs, etc. are handled in c-analyzr/cpython/_analyzer.py. # All kwlist arrays are handled in c-analyzr/cpython/_analyzer.py. -#----------------------- # other vars that are actually constant Include/internal/pycore_blocks_output_buffer.h - BUFFER_BLOCK_SIZE - @@ -373,6 +212,7 @@ Modules/_decimal/_decimal.c - ssize_constants - Modules/_elementtree.c - ExpatMemoryHandler - Modules/_io/_iomodule.c - static_types - Modules/_io/textio.c - encodefuncs - +Modules/_io/winconsoleio.c - _PyWindowsConsoleIO_Type - Modules/_localemodule.c - langinfo_constants - Modules/_pickle.c - READ_WHOLE_LINE - Modules/_sqlite/module.c - error_codes - @@ -465,8 +305,8 @@ Objects/obmalloc.c - _PyMem_Debug - Objects/obmalloc.c - _PyMem_Raw - Objects/obmalloc.c - _PyObject - Objects/obmalloc.c - usedpools - -Python/perf_trampoline.c - _Py_perfmap_callbacks - Objects/typeobject.c - name_op - +Objects/typeobject.c - slotdefs - Objects/unicodeobject.c - stripfuncnames - Objects/unicodeobject.c - utf7_category - Objects/unicodeobject.c unicode_decode_call_errorhandler_wchar argparse - @@ -489,10 +329,15 @@ Python/frozen.c - aliases - Python/frozen.c - bootstrap_modules - Python/frozen.c - stdlib_modules - Python/frozen.c - test_modules - +Python/frozen.c - _PyImport_FrozenAliases - +Python/frozen.c - _PyImport_FrozenBootstrap - +Python/frozen.c - _PyImport_FrozenStdlib - +Python/frozen.c - _PyImport_FrozenTest - Python/getopt.c - longopts - Python/import.c - _PyImport_Inittab - Python/import.c - _PySys_ImplCacheTag - Python/opcode_targets.h - opcode_targets - +Python/perf_trampoline.c - _Py_perfmap_callbacks - Python/pyhash.c - PyHash_Func - Python/pylifecycle.c - _C_LOCALE_WARNING - Python/pylifecycle.c - _PyOS_mystrnicmp_hack - @@ -501,6 +346,331 @@ Python/pystate.c - initial - Python/specialize.c - adaptive_opcodes - Python/specialize.c - cache_requirements - Python/specialize.c - compare_masks - -Python/specialize.c - _PyOpcode_Adaptive - Python/stdlib_module_names.h - _Py_stdlib_module_names - +Python/sysmodule.c - _PySys_ImplCacheTag - +Python/sysmodule.c - _PySys_ImplName - Python/sysmodule.c - whatstrings - + +##----------------------- +## test code + +Modules/_ctypes/_ctypes_test.c - _ctypes_test_slots - +Modules/_ctypes/_ctypes_test.c - _ctypes_testmodule - +Modules/_ctypes/_ctypes_test.c - _xxx_lib - +Modules/_ctypes/_ctypes_test.c - an_integer - +Modules/_ctypes/_ctypes_test.c - bottom - +Modules/_ctypes/_ctypes_test.c - 
last_tf_arg_s - +Modules/_ctypes/_ctypes_test.c - last_tf_arg_u - +Modules/_ctypes/_ctypes_test.c - last_tfrsuv_arg - +Modules/_ctypes/_ctypes_test.c - left - +Modules/_ctypes/_ctypes_test.c - module_methods - +Modules/_ctypes/_ctypes_test.c - my_eggs - +Modules/_ctypes/_ctypes_test.c - my_spams - +Modules/_ctypes/_ctypes_test.c - right - +Modules/_ctypes/_ctypes_test.c - top - +Modules/_testbuffer.c - NDArray_Type - +Modules/_testbuffer.c - StaticArray_Type - +Modules/_testbuffer.c - Struct - +Modules/_testbuffer.c - _testbuffer_functions - +Modules/_testbuffer.c - _testbuffermodule - +Modules/_testbuffer.c - calcsize - +Modules/_testbuffer.c - infobuf - +Modules/_testbuffer.c - ndarray_as_buffer - +Modules/_testbuffer.c - ndarray_as_mapping - +Modules/_testbuffer.c - ndarray_as_sequence - +Modules/_testbuffer.c - ndarray_getset - +Modules/_testbuffer.c - ndarray_methods - +Modules/_testbuffer.c - simple_fmt - +Modules/_testbuffer.c - simple_format - +Modules/_testbuffer.c - static_buffer - +Modules/_testbuffer.c - static_mem - +Modules/_testbuffer.c - static_shape - +Modules/_testbuffer.c - static_strides - +Modules/_testbuffer.c - staticarray_as_buffer - +Modules/_testbuffer.c - structmodule - +Modules/_testbuffer.c ndarray_init kwlist - +Modules/_testbuffer.c ndarray_memoryview_from_buffer format - +Modules/_testbuffer.c ndarray_memoryview_from_buffer info - +Modules/_testbuffer.c ndarray_memoryview_from_buffer shape - +Modules/_testbuffer.c ndarray_memoryview_from_buffer strides - +Modules/_testbuffer.c ndarray_memoryview_from_buffer suboffsets - +Modules/_testbuffer.c ndarray_push kwlist - +Modules/_testbuffer.c staticarray_init kwlist - +Modules/_testcapi/heaptype.c - _testcapimodule - +Modules/_testcapi/unicode.c - _testcapimodule - +Modules/_testcapimodule.c - ContainerNoGC_members - +Modules/_testcapimodule.c - ContainerNoGC_type - +Modules/_testcapimodule.c - FmData - +Modules/_testcapimodule.c - FmHook - +Modules/_testcapimodule.c - GenericAlias_Type - +Modules/_testcapimodule.c - Generic_Type - +Modules/_testcapimodule.c - HeapCTypeSetattr_slots - +Modules/_testcapimodule.c - HeapCTypeSetattr_spec - +Modules/_testcapimodule.c - HeapCTypeSubclassWithFinalizer_slots - +Modules/_testcapimodule.c - HeapCTypeSubclassWithFinalizer_spec - +Modules/_testcapimodule.c - HeapCTypeSubclass_slots - +Modules/_testcapimodule.c - HeapCTypeSubclass_spec - +Modules/_testcapimodule.c - HeapCTypeWithBuffer_slots - +Modules/_testcapimodule.c - HeapCTypeWithBuffer_spec - +Modules/_testcapimodule.c - HeapCTypeWithDict_slots - +Modules/_testcapimodule.c - HeapCTypeWithDict_spec - +Modules/_testcapimodule.c - HeapCTypeWithNegativeDict_slots - +Modules/_testcapimodule.c - HeapCTypeWithNegativeDict_spec - +Modules/_testcapimodule.c - HeapCTypeWithWeakref_slots - +Modules/_testcapimodule.c - HeapCTypeWithWeakref_spec - +Modules/_testcapimodule.c - HeapCType_slots - +Modules/_testcapimodule.c - HeapCType_spec - +Modules/_testcapimodule.c - HeapDocCType_slots - +Modules/_testcapimodule.c - HeapDocCType_spec - +Modules/_testcapimodule.c - HeapGcCType_slots - +Modules/_testcapimodule.c - HeapGcCType_spec - +Modules/_testcapimodule.c - MethClass_Type - +Modules/_testcapimodule.c - MethInstance_Type - +Modules/_testcapimodule.c - MethStatic_Type - +Modules/_testcapimodule.c - MethodDescriptor2_Type - +Modules/_testcapimodule.c - MethodDescriptorBase_Type - +Modules/_testcapimodule.c - MethodDescriptorDerived_Type - +Modules/_testcapimodule.c - MethodDescriptorNopGet_Type - +Modules/_testcapimodule.c - 
MyList_Type - +Modules/_testcapimodule.c - PyRecursingInfinitelyError_Type - +Modules/_testcapimodule.c - TestError - +Modules/_testcapimodule.c - TestMethods - +Modules/_testcapimodule.c - _HashInheritanceTester_Type - +Modules/_testcapimodule.c - _testcapimodule - +Modules/_testcapimodule.c - awaitType - +Modules/_testcapimodule.c - awaitType_as_async - +Modules/_testcapimodule.c - capsule_context - +Modules/_testcapimodule.c - capsule_destructor_call_count - +Modules/_testcapimodule.c - capsule_error - +Modules/_testcapimodule.c - capsule_name - +Modules/_testcapimodule.c - capsule_pointer - +Modules/_testcapimodule.c - decimal_initialized - +Modules/_testcapimodule.c - generic_alias_methods - +Modules/_testcapimodule.c - generic_methods - +Modules/_testcapimodule.c - heapctype_members - +Modules/_testcapimodule.c - heapctypesetattr_members - +Modules/_testcapimodule.c - heapctypesubclass_members - +Modules/_testcapimodule.c - heapctypewithdict_getsetlist - +Modules/_testcapimodule.c - heapctypewithdict_members - +Modules/_testcapimodule.c - heapctypewithnegativedict_members - +Modules/_testcapimodule.c - heapctypewithweakref_members - +Modules/_testcapimodule.c - ipowType - +Modules/_testcapimodule.c - ipowType_as_number - +Modules/_testcapimodule.c - matmulType - +Modules/_testcapimodule.c - matmulType_as_number - +Modules/_testcapimodule.c - meth_class_methods - +Modules/_testcapimodule.c - meth_instance_methods - +Modules/_testcapimodule.c - meth_static_methods - +Modules/_testcapimodule.c - ml - +Modules/_testcapimodule.c - str1 - +Modules/_testcapimodule.c - str2 - +Modules/_testcapimodule.c - test_members - +Modules/_testcapimodule.c - test_run_counter - +Modules/_testcapimodule.c - test_structmembersType - +Modules/_testcapimodule.c - thread_done - +Modules/_testcapimodule.c - x - +Modules/_testcapimodule.c getargs_keyword_only keywords - +Modules/_testcapimodule.c getargs_keywords keywords - +Modules/_testcapimodule.c getargs_positional_only_and_keywords keywords - +Modules/_testcapimodule.c getargs_s_hash_int2 keywords static char*[] +Modules/_testcapimodule.c make_exception_with_doc kwlist - +Modules/_testcapimodule.c raise_SIGINT_then_send_None PyId_send - +Modules/_testcapimodule.c slot_tp_del PyId___tp_del__ - +Modules/_testcapimodule.c test_capsule buffer - +Modules/_testcapimodule.c test_empty_argparse kwlist - +Modules/_testcapimodule.c test_structmembers_new keywords - +Modules/_testcapimodule.c getargs_s_hash_int keywords - +Modules/_testcapimodule.c - g_dict_watch_events - +Modules/_testcapimodule.c - g_dict_watchers_installed - +Modules/_testcapimodule.c - g_type_modified_events - +Modules/_testcapimodule.c - g_type_watchers_installed - +Modules/_testimportmultiple.c - _barmodule - +Modules/_testimportmultiple.c - _foomodule - +Modules/_testimportmultiple.c - _testimportmultiple - +Modules/_testinternalcapi.c - TestMethods - +Modules/_testinternalcapi.c - _testcapimodule - +Modules/_testmultiphase.c - Example_Type_slots - +Modules/_testmultiphase.c - Example_Type_spec - +Modules/_testmultiphase.c - Example_methods - +Modules/_testmultiphase.c - StateAccessType_Type_slots - +Modules/_testmultiphase.c - StateAccessType_methods - +Modules/_testmultiphase.c - StateAccessType_spec - +Modules/_testmultiphase.c - Str_Type_slots - +Modules/_testmultiphase.c - Str_Type_spec - +Modules/_testmultiphase.c - def_bad_large - +Modules/_testmultiphase.c - def_bad_negative - +Modules/_testmultiphase.c - def_create_int_with_state - +Modules/_testmultiphase.c - def_create_null - 
+Modules/_testmultiphase.c - def_create_raise - +Modules/_testmultiphase.c - def_create_unreported_exception - +Modules/_testmultiphase.c - def_exec_err - +Modules/_testmultiphase.c - def_exec_raise - +Modules/_testmultiphase.c - def_exec_unreported_exception - +Modules/_testmultiphase.c - def_meth_state_access - +Modules/_testmultiphase.c - def_negative_size - +Modules/_testmultiphase.c - def_nonascii_kana - +Modules/_testmultiphase.c - def_nonascii_latin - +Modules/_testmultiphase.c - def_nonmodule - +Modules/_testmultiphase.c - def_nonmodule_with_exec_slots - +Modules/_testmultiphase.c - def_nonmodule_with_methods - +Modules/_testmultiphase.c - imp_dummy_def - +Modules/_testmultiphase.c - main_def - +Modules/_testmultiphase.c - main_slots - +Modules/_testmultiphase.c - meth_state_access_slots - +Modules/_testmultiphase.c - nonmodule_methods - +Modules/_testmultiphase.c - null_slots_def - +Modules/_testmultiphase.c - slots_bad_large - +Modules/_testmultiphase.c - slots_bad_negative - +Modules/_testmultiphase.c - slots_create_nonmodule - +Modules/_testmultiphase.c - slots_create_nonmodule - +Modules/_testmultiphase.c - slots_create_null - +Modules/_testmultiphase.c - slots_create_raise - +Modules/_testmultiphase.c - slots_create_unreported_exception - +Modules/_testmultiphase.c - slots_exec_err - +Modules/_testmultiphase.c - slots_exec_raise - +Modules/_testmultiphase.c - slots_exec_unreported_exception - +Modules/_testmultiphase.c - slots_nonmodule_with_exec_slots - +Modules/_testmultiphase.c - testexport_methods - +Modules/_testmultiphase.c - uninitialized_def - +Modules/_xxtestfuzz/_xxtestfuzz.c - _fuzzmodule - +Modules/_xxtestfuzz/_xxtestfuzz.c - module_methods - +Modules/_xxtestfuzz/fuzzer.c - SRE_FLAG_DEBUG - +Modules/_xxtestfuzz/fuzzer.c - ast_literal_eval_method - +Modules/_xxtestfuzz/fuzzer.c - compiled_patterns - +Modules/_xxtestfuzz/fuzzer.c - csv_error - +Modules/_xxtestfuzz/fuzzer.c - csv_module - +Modules/_xxtestfuzz/fuzzer.c - json_loads_method - +Modules/_xxtestfuzz/fuzzer.c - regex_patterns - +Modules/_xxtestfuzz/fuzzer.c - sre_compile_method - +Modules/_xxtestfuzz/fuzzer.c - sre_error_exception - +Modules/_xxtestfuzz/fuzzer.c - struct_error - +Modules/_xxtestfuzz/fuzzer.c - struct_unpack_method - +Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput CSV_READER_INITIALIZED - +Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput JSON_LOADS_INITIALIZED - +Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput SRE_COMPILE_INITIALIZED - +Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput SRE_MATCH_INITIALIZED - +Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput STRUCT_UNPACK_INITIALIZED - +Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput AST_LITERAL_EVAL_INITIALIZED - + +##----------------------- +## the analyzer should have ignored these +# XXX Fix the analyzer. 
+ +## forward/extern references +Include/py_curses.h - PyCurses_API - +Include/pydecimal.h - _decimal_api - +Modules/_blake2/blake2module.c - blake2b_type_spec - +Modules/_blake2/blake2module.c - blake2s_type_spec - +Modules/_io/fileio.c - _Py_open_cloexec_works - +Modules/_io/_iomodule.h - PyIOBase_Type - +Modules/_io/_iomodule.h - PyRawIOBase_Type - +Modules/_io/_iomodule.h - PyBufferedIOBase_Type - +Modules/_io/_iomodule.h - PyTextIOBase_Type - +Modules/_io/_iomodule.h - PyFileIO_Type - +Modules/_io/_iomodule.h - PyBytesIO_Type - +Modules/_io/_iomodule.h - PyStringIO_Type - +Modules/_io/_iomodule.h - PyBufferedReader_Type - +Modules/_io/_iomodule.h - PyBufferedWriter_Type - +Modules/_io/_iomodule.h - PyBufferedRWPair_Type - +Modules/_io/_iomodule.h - PyBufferedRandom_Type - +Modules/_io/_iomodule.h - PyTextIOWrapper_Type - +Modules/_io/_iomodule.h - PyIncrementalNewlineDecoder_Type - +Modules/_io/_iomodule.h - _PyBytesIOBuffer_Type - +Modules/_io/_iomodule.h - _PyIO_Module - +Modules/_io/_iomodule.h - _PyIO_str_close - +Modules/_io/_iomodule.h - _PyIO_str_closed - +Modules/_io/_iomodule.h - _PyIO_str_decode - +Modules/_io/_iomodule.h - _PyIO_str_encode - +Modules/_io/_iomodule.h - _PyIO_str_fileno - +Modules/_io/_iomodule.h - _PyIO_str_flush - +Modules/_io/_iomodule.h - _PyIO_str_getstate - +Modules/_io/_iomodule.h - _PyIO_str_isatty - +Modules/_io/_iomodule.h - _PyIO_str_newlines - +Modules/_io/_iomodule.h - _PyIO_str_nl - +Modules/_io/_iomodule.h - _PyIO_str_peek - +Modules/_io/_iomodule.h - _PyIO_str_read - +Modules/_io/_iomodule.h - _PyIO_str_read1 - +Modules/_io/_iomodule.h - _PyIO_str_readable - +Modules/_io/_iomodule.h - _PyIO_str_readall - +Modules/_io/_iomodule.h - _PyIO_str_readinto - +Modules/_io/_iomodule.h - _PyIO_str_readline - +Modules/_io/_iomodule.h - _PyIO_str_reset - +Modules/_io/_iomodule.h - _PyIO_str_seek - +Modules/_io/_iomodule.h - _PyIO_str_seekable - +Modules/_io/_iomodule.h - _PyIO_str_setstate - +Modules/_io/_iomodule.h - _PyIO_str_tell - +Modules/_io/_iomodule.h - _PyIO_str_truncate - +Modules/_io/_iomodule.h - _PyIO_str_writable - +Modules/_io/_iomodule.h - _PyIO_str_write - +Modules/_io/_iomodule.h - _PyIO_empty_str - +Modules/_io/_iomodule.h - _PyIO_empty_bytes - +Modules/_multiprocessing/multiprocessing.h - _PyMp_SemLockType - +Modules/_sqlite/module.c - _pysqlite_converters - +Modules/_sqlite/module.c - _pysqlite_enable_callback_tracebacks - +Modules/_sqlite/module.c - pysqlite_BaseTypeAdapted - +Modules/_sqlite/module.h - pysqlite_global_state - +Modules/_testcapimodule.c - _PyBytesIOBuffer_Type - +Modules/posixmodule.c - _Py_open_cloexec_works - +Modules/posixmodule.c - environ - +Objects/object.c - _Py_GenericAliasIterType - +Objects/object.c - _PyMemoryIter_Type - +Objects/object.c - _PyLineIterator - +Objects/object.c - _PyPositionsIterator - +Python/perf_trampoline.c - _Py_trampoline_func_start - +Python/perf_trampoline.c - _Py_trampoline_func_end - +Python/importdl.h - _PyImport_DynLoadFiletab - +Modules/expat/xmlrole.c - prolog0 - +Modules/expat/xmlrole.c - prolog1 - +Modules/expat/xmlrole.c - prolog2 - +Modules/expat/xmlrole.c - doctype0 - +Modules/expat/xmlrole.c - doctype1 - +Modules/expat/xmlrole.c - doctype2 - +Modules/expat/xmlrole.c - doctype3 - +Modules/expat/xmlrole.c - doctype4 - +Modules/expat/xmlrole.c - doctype5 - +Modules/expat/xmlrole.c - internalSubset - +Modules/expat/xmlrole.c - entity0 - +Modules/expat/xmlrole.c - entity1 - +Modules/expat/xmlrole.c - entity2 - +Modules/expat/xmlrole.c - entity3 - +Modules/expat/xmlrole.c - 
entity4 - +Modules/expat/xmlrole.c - entity5 - +Modules/expat/xmlrole.c - entity6 - +Modules/expat/xmlrole.c - entity7 - +Modules/expat/xmlrole.c - entity8 - +Modules/expat/xmlrole.c - entity9 - +Modules/expat/xmlrole.c - entity10 - +Modules/expat/xmlrole.c - notation0 - +Modules/expat/xmlrole.c - notation1 - +Modules/expat/xmlrole.c - notation2 - +Modules/expat/xmlrole.c - notation3 - +Modules/expat/xmlrole.c - notation4 - +Modules/expat/xmlrole.c - attlist0 - +Modules/expat/xmlrole.c - attlist1 - +Modules/expat/xmlrole.c - attlist2 - +Modules/expat/xmlrole.c - attlist3 - +Modules/expat/xmlrole.c - attlist4 - +Modules/expat/xmlrole.c - attlist5 - +Modules/expat/xmlrole.c - attlist6 - +Modules/expat/xmlrole.c - attlist7 - +Modules/expat/xmlrole.c - attlist8 - +Modules/expat/xmlrole.c - attlist9 - +Modules/expat/xmlrole.c - element0 - +Modules/expat/xmlrole.c - element1 - +Modules/expat/xmlrole.c - element2 - +Modules/expat/xmlrole.c - element3 - +Modules/expat/xmlrole.c - element4 - +Modules/expat/xmlrole.c - element5 - +Modules/expat/xmlrole.c - element6 - +Modules/expat/xmlrole.c - element7 - +Modules/expat/xmlrole.c - externalSubset0 - +Modules/expat/xmlrole.c - externalSubset1 - +Modules/expat/xmlrole.c - condSect0 - +Modules/expat/xmlrole.c - condSect1 - +Modules/expat/xmlrole.c - condSect2 - +Modules/expat/xmlrole.c - declClose - +Modules/expat/xmlrole.c - error - + +## other +Modules/_io/_iomodule.c - _PyIO_Module - +Modules/_sqlite/module.c - _sqlite3module - diff --git a/Tools/c-analyzer/table-file.py b/Tools/c-analyzer/table-file.py index 3cc05cc9de7779..d36f814415c8e7 100644 --- a/Tools/c-analyzer/table-file.py +++ b/Tools/c-analyzer/table-file.py @@ -1,43 +1,59 @@ +KINDS = [ + 'section-major', + 'section-minor', + 'section-group', + 'row', +] + + def iter_clean_lines(lines): lines = iter(lines) - for line in lines: - line = line.strip() - if line.startswith('# XXX'): + for rawline in lines: + line = rawline.strip() + if line.startswith('#') and not rawline.startswith('##'): continue - yield line + yield line, rawline def parse_table_lines(lines): lines = iter_clean_lines(lines) - for line in lines: - if line.startswith(('####', '#----')): - kind = 0 if line[1] == '#' else 1 - try: - line = next(lines).strip() - except StopIteration: - line = '' - if not line.startswith('# '): - raise NotImplementedError(line) - yield kind, line[2:].lstrip() - continue - - maybe = None - while line.startswith('#'): - if line != '#' and line[1] == ' ': - maybe = line[2:].lstrip() - try: - line = next(lines).strip() - except StopIteration: - return - if not line: - break - else: - if line: - if maybe: - yield 2, maybe - yield 'row', line + group = None + prev = '' + for line, rawline in lines: + if line.startswith('## '): + assert not rawline.startswith(' '), (line, rawline) + if group: + assert prev, (line, rawline) + kind, after, _ = group + assert kind and kind != 'section-group', (group, line, rawline) + assert after is not None, (group, line, rawline) + else: + assert not prev, (prev, line, rawline) + kind, after = group = ('section-group', None) + title = line[3:].lstrip() + assert title, (line, rawline) + if after is not None: + try: + line, rawline = next(lines) + except StopIteration: + line = None + if line != after: + raise NotImplementedError((group, line, rawline)) + yield kind, title + group = None + elif group: + raise NotImplementedError((group, line, rawline)) + elif line.startswith('##---'): + assert line.rstrip('-') == '##', (line, rawline) + group = ('section-minor', '', line) 
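+        # Marker syntax handled by this loop (as used in the tables above):
+        #   a line of only '#' characters   opens a major section,
+        #   '##' followed by dashes         opens a minor section,
+        #   '## <title>'                    titles the pending section, or
+        #                                   introduces a group on its own.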
+ elif line.startswith('#####'): + assert not line.strip('#'), (line, rawline) + group = ('section-major', '', line) + elif line: + yield 'row', line + prev = line def iter_sections(lines): @@ -49,12 +65,13 @@ def iter_sections(lines): if header is None: header = value continue - raise NotImplementedError(value) + raise NotImplementedError(repr(value)) yield tuple(section), value else: if header is None: header = False - section[kind:] = [value] + start = KINDS.index(kind) + section[start:] = [value] def collect_sections(lines): diff --git a/Tools/cases_generator/README.md b/Tools/cases_generator/README.md new file mode 100644 index 00000000000000..dc055ead1941cd --- /dev/null +++ b/Tools/cases_generator/README.md @@ -0,0 +1,33 @@ +# Tooling to generate interpreters + +What's currently here: + +- `lexer.py`: lexer for C, originally written by Mark Shannon +- `plexer.py`: OO interface on top of lexer.py; main class: `PLexer` +- `parser.py`: Parser for instruction definition DSL; main class `Parser` +- `generate_cases.py`: driver script to read `Python/bytecodes.c` and + write `Python/generated_cases.c.h` + +The DSL for the instruction definitions in `Python/bytecodes.c` is described +[here](https://github.com/faster-cpython/ideas/blob/main/3.12/interpreter_definition.md). +Note that there is some dummy C code at the top and bottom of the file +to fool text editors like VS Code into believing this is valid C code. + +## A bit about the parser + +The parser class uses a pretty standard recursive descent scheme, +but with unlimited backtracking. +The `PLexer` class tokenizes the entire input before parsing starts. +We do not run the C preprocessor. +Each parsing method returns either an AST node (a `Node` instance) +or `None`, or raises `SyntaxError` (showing the error in the C source). + +Most parsing methods are decorated with `@contextual`, which automatically +resets the tokenizer input position when `None` is returned. +Parsing methods may also raise `SyntaxError`, which is irrecoverable. +When a parsing method returns `None`, it is possible that after backtracking +a different parsing method returns a valid AST. + +Neither the lexer nor the parsers are complete or fully correct. +Most known issues are tersely indicated by `# TODO:` comments. +We plan to fix issues as they become relevant. diff --git a/Tools/cases_generator/generate_cases.py b/Tools/cases_generator/generate_cases.py new file mode 100644 index 00000000000000..d75a36f887f9f5 --- /dev/null +++ b/Tools/cases_generator/generate_cases.py @@ -0,0 +1,754 @@ +"""Generate the main interpreter switch. + +Reads the instruction definitions from bytecodes.c. +Writes the cases to generated_cases.c.h, which is #included in ceval.c. 
+""" + +import argparse +import contextlib +import dataclasses +import os +import re +import sys +import typing + +import parser +from parser import StackEffect + +DEFAULT_INPUT = os.path.relpath( + os.path.join(os.path.dirname(__file__), "../../Python/bytecodes.c") +) +DEFAULT_OUTPUT = os.path.relpath( + os.path.join(os.path.dirname(__file__), "../../Python/generated_cases.c.h") +) +BEGIN_MARKER = "// BEGIN BYTECODES //" +END_MARKER = "// END BYTECODES //" +RE_PREDICTED = r"^\s*(?:PREDICT\(|GO_TO_INSTRUCTION\(|DEOPT_IF\(.*?,\s*)(\w+)\);\s*$" +UNUSED = "unused" +BITS_PER_CODE_UNIT = 16 + +arg_parser = argparse.ArgumentParser( + description="Generate the code for the interpreter switch.", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, +) +arg_parser.add_argument( + "-i", "--input", type=str, help="Instruction definitions", default=DEFAULT_INPUT +) +arg_parser.add_argument( + "-o", "--output", type=str, help="Generated code", default=DEFAULT_OUTPUT +) + + +class Formatter: + """Wraps an output stream with the ability to indent etc.""" + + stream: typing.TextIO + prefix: str + + def __init__(self, stream: typing.TextIO, indent: int) -> None: + self.stream = stream + self.prefix = " " * indent + + def write_raw(self, s: str) -> None: + self.stream.write(s) + + def emit(self, arg: str) -> None: + if arg: + self.write_raw(f"{self.prefix}{arg}\n") + else: + self.write_raw("\n") + + @contextlib.contextmanager + def indent(self): + self.prefix += " " + yield + self.prefix = self.prefix[:-4] + + @contextlib.contextmanager + def block(self, head: str): + if head: + self.emit(head + " {") + else: + self.emit("{") + with self.indent(): + yield + self.emit("}") + + def stack_adjust(self, diff: int): + if diff > 0: + self.emit(f"STACK_GROW({diff});") + elif diff < 0: + self.emit(f"STACK_SHRINK({-diff});") + + def declare(self, dst: StackEffect, src: StackEffect | None): + if dst.name == UNUSED: + return + typ = f"{dst.type} " if dst.type else "PyObject *" + init = "" + if src: + cast = self.cast(dst, src) + init = f" = {cast}{src.name}" + self.emit(f"{typ}{dst.name}{init};") + + def assign(self, dst: StackEffect, src: StackEffect): + if src.name == UNUSED: + return + cast = self.cast(dst, src) + if m := re.match(r"^PEEK\((\d+)\)$", dst.name): + self.emit(f"POKE({m.group(1)}, {cast}{src.name});") + elif m := re.match(r"^REG\(oparg(\d+)\)$", dst.name): + self.emit(f"Py_XSETREF({dst.name}, {cast}{src.name});") + else: + self.emit(f"{dst.name} = {cast}{src.name};") + + def cast(self, dst: StackEffect, src: StackEffect) -> str: + return f"({dst.type or 'PyObject *'})" if src.type != dst.type else "" + + +@dataclasses.dataclass +class Instruction: + """An instruction with additional data and code.""" + + # Parts of the underlying instruction definition + inst: parser.InstDef + register: bool + kind: typing.Literal["inst", "op"] + name: str + block: parser.Block + block_text: list[str] # Block.text, less curlies, less PREDICT() calls + predictions: list[str] # Prediction targets (instruction names) + + # Computed by constructor + always_exits: bool + cache_offset: int + cache_effects: list[parser.CacheEffect] + input_effects: list[StackEffect] + output_effects: list[StackEffect] + input_registers: list[str] # Parallel to input_effects + output_registers: list[str] # Etc. 
+ + # Set later + family: parser.Family | None = None + predicted: bool = False + + def __init__(self, inst: parser.InstDef): + self.inst = inst + self.register = inst.register + self.kind = inst.kind + self.name = inst.name + self.block = inst.block + self.block_text, self.predictions = extract_block_text(self.block) + self.always_exits = always_exits(self.block_text) + self.cache_effects = [ + effect for effect in inst.inputs if isinstance(effect, parser.CacheEffect) + ] + self.cache_offset = sum(c.size for c in self.cache_effects) + self.input_effects = [ + effect for effect in inst.inputs if isinstance(effect, StackEffect) + ] + self.output_effects = inst.outputs # For consistency/completeness + + def analyze_registers(self, a: "Analyzer") -> None: + regs = iter(("REG(oparg1)", "REG(oparg2)", "REG(oparg3)")) + try: + self.input_registers = [next(regs) for _ in self.input_effects] + self.output_registers = [next(regs) for _ in self.output_effects] + except StopIteration: # Running out of registers + a.error(f"Instruction {self.name} has too many register effects") + + def write(self, out: Formatter) -> None: + """Write one instruction, sans prologue and epilogue.""" + # Write a static assertion that a family's cache size is correct + if family := self.family: + if self.name == family.members[0]: + if cache_size := family.size: + out.emit( + f"static_assert({cache_size} == " + f'{self.cache_offset}, "incorrect cache size");' + ) + + if not self.register: + # Write input stack effect variable declarations and initializations + for i, ieffect in enumerate(reversed(self.input_effects), 1): + src = StackEffect(f"PEEK({i})", "") + out.declare(ieffect, src) + else: + # Write input register variable declarations and initializations + for ieffect, reg in zip(self.input_effects, self.input_registers): + src = StackEffect(reg, "") + out.declare(ieffect, src) + + # Write output stack effect variable declarations + input_names = {ieffect.name for ieffect in self.input_effects} + for oeffect in self.output_effects: + if oeffect.name not in input_names: + out.declare(oeffect, None) + + self.write_body(out, 0) + + # Skip the rest if the block always exits + if self.always_exits: + return + + if not self.register: + # Write net stack growth/shrinkage + diff = len(self.output_effects) - len(self.input_effects) + out.stack_adjust(diff) + + # Write output stack effect assignments + unmoved_names: set[str] = set() + for ieffect, oeffect in zip(self.input_effects, self.output_effects): + if ieffect.name == oeffect.name: + unmoved_names.add(ieffect.name) + for i, oeffect in enumerate(reversed(self.output_effects), 1): + if oeffect.name not in unmoved_names: + dst = StackEffect(f"PEEK({i})", "") + out.assign(dst, oeffect) + else: + # Write output register assignments + for oeffect, reg in zip(self.output_effects, self.output_registers): + dst = StackEffect(reg, "") + out.assign(dst, oeffect) + + # Write cache effect + if self.cache_offset: + out.emit(f"JUMPBY({self.cache_offset});") + + def write_body(self, out: Formatter, dedent: int, cache_adjust: int = 0) -> None: + """Write the instruction body.""" + # Write cache effect variable declarations and initializations + cache_offset = cache_adjust + for ceffect in self.cache_effects: + if ceffect.name != UNUSED: + bits = ceffect.size * BITS_PER_CODE_UNIT + if bits == 64: + # NOTE: We assume that 64-bit data in the cache + # is always an object pointer. + # If this becomes false, we need a way to specify + # syntactically what type the cache data is. 
+ typ = "PyObject *" + func = "read_obj" + else: + typ = f"uint{bits}_t " + func = f"read_u{bits}" + out.emit(f"{typ}{ceffect.name} = {func}(&next_instr[{cache_offset}].cache);") + cache_offset += ceffect.size + assert cache_offset == self.cache_offset + cache_adjust + + # Write the body, substituting a goto for ERROR_IF() and other stuff + assert dedent <= 0 + extra = " " * -dedent + for line in self.block_text: + if m := re.match(r"(\s*)ERROR_IF\((.+), (\w+)\);\s*$", line): + space, cond, label = m.groups() + # ERROR_IF() must pop the inputs from the stack. + # The code block is responsible for DECREF()ing them. + # NOTE: If the label doesn't exist, just add it to ceval.c. + if not self.register: + ninputs = len(self.input_effects) + # Don't pop common input/output effects at the bottom! + # These aren't DECREF'ed so they can stay. + for ieff, oeff in zip(self.input_effects, self.output_effects): + if ieff.name == oeff.name: + ninputs -= 1 + else: + break + else: + ninputs = 0 + if ninputs: + out.write_raw( + f"{extra}{space}if ({cond}) goto pop_{ninputs}_{label};\n" + ) + else: + out.write_raw(f"{extra}{space}if ({cond}) goto {label};\n") + elif m := re.match(r"(\s*)DECREF_INPUTS\(\);\s*$", line): + if not self.register: + space = m.group(1) + for ieff in self.input_effects: + out.write_raw(f"{extra}{space}Py_DECREF({ieff.name});\n") + else: + out.write_raw(extra + line) + + +InstructionOrCacheEffect = Instruction | parser.CacheEffect +StackEffectMapping = list[tuple[StackEffect, StackEffect]] + + +@dataclasses.dataclass +class Component: + instr: Instruction + input_mapping: StackEffectMapping + output_mapping: StackEffectMapping + + def write_body(self, out: Formatter, cache_adjust: int) -> None: + with out.block(""): + for var, ieffect in self.input_mapping: + out.declare(ieffect, var) + for _, oeffect in self.output_mapping: + out.declare(oeffect, None) + + self.instr.write_body(out, dedent=-4, cache_adjust=cache_adjust) + + for var, oeffect in self.output_mapping: + out.assign(var, oeffect) + + +@dataclasses.dataclass +class SuperOrMacroInstruction: + """Common fields for super- and macro instructions.""" + + name: str + stack: list[StackEffect] + initial_sp: int + final_sp: int + + +@dataclasses.dataclass +class SuperInstruction(SuperOrMacroInstruction): + """A super-instruction.""" + + super: parser.Super + parts: list[Component] + + +@dataclasses.dataclass +class MacroInstruction(SuperOrMacroInstruction): + """A macro instruction.""" + + macro: parser.Macro + parts: list[Component | parser.CacheEffect] + + +class Analyzer: + """Parse input, analyze it, and write to output.""" + + filename: str + output_filename: str + src: str + errors: int = 0 + + def __init__(self, filename: str, output_filename: str): + """Read the input file.""" + self.filename = filename + self.output_filename = output_filename + with open(filename) as f: + self.src = f.read() + + def error(self, msg: str, node: parser.Node) -> None: + lineno = 0 + if context := node.context: + # Use line number of first non-comment in the node + for token in context.owner.tokens[context.begin : context.end]: + lineno = token.line + if token.kind != "COMMENT": + break + print(f"{self.filename}:{lineno}: {msg}", file=sys.stderr) + self.errors += 1 + + everything: list[parser.InstDef | parser.Super | parser.Macro] + instrs: dict[str, Instruction] # Includes ops + supers: dict[str, parser.Super] + super_instrs: dict[str, SuperInstruction] + macros: dict[str, parser.Macro] + macro_instrs: dict[str, MacroInstruction] + families: 
dict[str, parser.Family] + + def parse(self) -> None: + """Parse the source text. + + We only want the parser to see the stuff between the + begin and end markers. + """ + psr = parser.Parser(self.src, filename=self.filename) + + # Skip until begin marker + while tkn := psr.next(raw=True): + if tkn.text == BEGIN_MARKER: + break + else: + raise psr.make_syntax_error( + f"Couldn't find {BEGIN_MARKER!r} in {psr.filename}" + ) + start = psr.getpos() + + # Find end marker, then delete everything after it + while tkn := psr.next(raw=True): + if tkn.text == END_MARKER: + break + del psr.tokens[psr.getpos() - 1 :] + + # Parse from start + psr.setpos(start) + self.everything = [] + self.instrs = {} + self.supers = {} + self.macros = {} + self.families = {} + while thing := psr.definition(): + match thing: + case parser.InstDef(name=name): + self.instrs[name] = Instruction(thing) + self.everything.append(thing) + case parser.Super(name): + self.supers[name] = thing + self.everything.append(thing) + case parser.Macro(name): + self.macros[name] = thing + self.everything.append(thing) + case parser.Family(name): + self.families[name] = thing + case _: + typing.assert_never(thing) + if not psr.eof(): + raise psr.make_syntax_error("Extra stuff at the end") + + print( + f"Read {len(self.instrs)} instructions/ops, " + f"{len(self.supers)} supers, {len(self.macros)} macros, " + f"and {len(self.families)} families from {self.filename}", + file=sys.stderr, + ) + + def analyze(self) -> None: + """Analyze the inputs. + + Raises SystemExit if there is an error. + """ + self.find_predictions() + self.map_families() + self.check_families() + self.analyze_register_instrs() + self.analyze_supers_and_macros() + + def find_predictions(self) -> None: + """Find the instructions that need PREDICTED() labels.""" + for instr in self.instrs.values(): + targets = set(instr.predictions) + for line in instr.block_text: + if m := re.match(RE_PREDICTED, line): + targets.add(m.group(1)) + for target in targets: + if target_instr := self.instrs.get(target): + target_instr.predicted = True + else: + self.error( + f"Unknown instruction {target!r} predicted in {instr.name!r}", + instr.inst, # TODO: Use better location + ) + + def map_families(self) -> None: + """Make instruction names back to their family, if they have one.""" + for family in self.families.values(): + for member in family.members: + if member_instr := self.instrs.get(member): + member_instr.family = family + else: + self.error( + f"Unknown instruction {member!r} referenced in family {family.name!r}", + family, + ) + + def check_families(self) -> None: + """Check each family: + + - Must have at least 2 members + - All members must be known instructions + - All members must have the same cache, input and output effects + """ + for family in self.families.values(): + if len(family.members) < 2: + self.error(f"Family {family.name!r} has insufficient members", family) + members = [member for member in family.members if member in self.instrs] + if members != family.members: + unknown = set(family.members) - set(members) + self.error( + f"Family {family.name!r} has unknown members: {unknown}", family + ) + if len(members) < 2: + continue + head = self.instrs[members[0]] + cache = head.cache_offset + input = len(head.input_effects) + output = len(head.output_effects) + for member in members[1:]: + instr = self.instrs[member] + c = instr.cache_offset + i = len(instr.input_effects) + o = len(instr.output_effects) + if (c, i, o) != (cache, input, output): + self.error( + 
f"Family {family.name!r} has inconsistent " + f"(cache, inputs, outputs) effects:\n" + f" {family.members[0]} = {(cache, input, output)}; " + f"{member} = {(c, i, o)}", + family, + ) + + def analyze_register_instrs(self) -> None: + for instr in self.instrs.values(): + if instr.register: + instr.analyze_registers(self) + + def analyze_supers_and_macros(self) -> None: + """Analyze each super- and macro instruction.""" + self.super_instrs = {} + self.macro_instrs = {} + for name, super in self.supers.items(): + self.super_instrs[name] = self.analyze_super(super) + for name, macro in self.macros.items(): + self.macro_instrs[name] = self.analyze_macro(macro) + + def analyze_super(self, super: parser.Super) -> SuperInstruction: + components = self.check_super_components(super) + stack, initial_sp = self.stack_analysis(components) + sp = initial_sp + parts: list[Component] = [] + for instr in components: + part, sp = self.analyze_instruction(instr, stack, sp) + parts.append(part) + final_sp = sp + return SuperInstruction(super.name, stack, initial_sp, final_sp, super, parts) + + def analyze_macro(self, macro: parser.Macro) -> MacroInstruction: + components = self.check_macro_components(macro) + stack, initial_sp = self.stack_analysis(components) + sp = initial_sp + parts: list[Component | parser.CacheEffect] = [] + for component in components: + match component: + case parser.CacheEffect() as ceffect: + parts.append(ceffect) + case Instruction() as instr: + part, sp = self.analyze_instruction(instr, stack, sp) + parts.append(part) + case _: + typing.assert_never(component) + final_sp = sp + return MacroInstruction(macro.name, stack, initial_sp, final_sp, macro, parts) + + def analyze_instruction( + self, instr: Instruction, stack: list[StackEffect], sp: int + ) -> tuple[Component, int]: + input_mapping: StackEffectMapping = [] + for ieffect in reversed(instr.input_effects): + sp -= 1 + input_mapping.append((stack[sp], ieffect)) + output_mapping: StackEffectMapping = [] + for oeffect in instr.output_effects: + output_mapping.append((stack[sp], oeffect)) + sp += 1 + return Component(instr, input_mapping, output_mapping), sp + + def check_super_components(self, super: parser.Super) -> list[Instruction]: + components: list[Instruction] = [] + for op in super.ops: + if op.name not in self.instrs: + self.error(f"Unknown instruction {op.name!r}", super) + else: + components.append(self.instrs[op.name]) + return components + + def check_macro_components( + self, macro: parser.Macro + ) -> list[InstructionOrCacheEffect]: + components: list[InstructionOrCacheEffect] = [] + for uop in macro.uops: + match uop: + case parser.OpName(name): + if name not in self.instrs: + self.error(f"Unknown instruction {name!r}", macro) + components.append(self.instrs[name]) + case parser.CacheEffect(): + components.append(uop) + case _: + typing.assert_never(uop) + return components + + def stack_analysis( + self, components: typing.Iterable[InstructionOrCacheEffect] + ) -> tuple[list[StackEffect], int]: + """Analyze a super-instruction or macro. + + Print an error if there's a cache effect (which we don't support yet). + + Return the list of variable names and the initial stack pointer. 
+ """ + lowest = current = highest = 0 + for thing in components: + match thing: + case Instruction() as instr: + current -= len(instr.input_effects) + lowest = min(lowest, current) + current += len(instr.output_effects) + highest = max(highest, current) + case parser.CacheEffect(): + pass + case _: + typing.assert_never(thing) + # At this point, 'current' is the net stack effect, + # and 'lowest' and 'highest' are the extremes. + # Note that 'lowest' may be negative. + # TODO: Reverse the numbering. + stack = [ + StackEffect(f"_tmp_{i+1}", "") for i in reversed(range(highest - lowest)) + ] + return stack, -lowest + + def write_instructions(self) -> None: + """Write instructions to output file.""" + with open(self.output_filename, "w") as f: + # Write provenance header + f.write(f"// This file is generated by {os.path.relpath(__file__)}\n") + f.write(f"// from {os.path.relpath(self.filename)}\n") + f.write(f"// Do not edit!\n") + + # Create formatter; the rest of the code uses this. + self.out = Formatter(f, 8) + + # Write and count instructions of all kinds + n_instrs = 0 + n_supers = 0 + n_macros = 0 + for thing in self.everything: + match thing: + case parser.InstDef(): + if thing.kind == "inst": + n_instrs += 1 + self.write_instr(self.instrs[thing.name]) + case parser.Super(): + n_supers += 1 + self.write_super(self.super_instrs[thing.name]) + case parser.Macro(): + n_macros += 1 + self.write_macro(self.macro_instrs[thing.name]) + case _: + typing.assert_never(thing) + + print( + f"Wrote {n_instrs} instructions, {n_supers} supers, " + f"and {n_macros} macros to {self.output_filename}", + file=sys.stderr, + ) + + def write_instr(self, instr: Instruction) -> None: + name = instr.name + self.out.emit("") + with self.out.block(f"TARGET({name})"): + if instr.predicted: + self.out.emit(f"PREDICTED({name});") + instr.write(self.out) + if not instr.always_exits: + for prediction in instr.predictions: + self.out.emit(f"PREDICT({prediction});") + self.out.emit(f"DISPATCH();") + + def write_super(self, sup: SuperInstruction) -> None: + """Write code for a super-instruction.""" + with self.wrap_super_or_macro(sup): + first = True + for comp in sup.parts: + if not first: + self.out.emit("NEXTOPARG();") + self.out.emit(f"JUMPBY(OPSIZE);") + first = False + comp.write_body(self.out, 0) + if comp.instr.cache_offset: + self.out.emit(f"JUMPBY({comp.instr.cache_offset});") + + def write_macro(self, mac: MacroInstruction) -> None: + """Write code for a macro instruction.""" + with self.wrap_super_or_macro(mac): + cache_adjust = 0 + for part in mac.parts: + match part: + case parser.CacheEffect(size=size): + cache_adjust += size + case Component() as comp: + comp.write_body(self.out, cache_adjust) + cache_adjust += comp.instr.cache_offset + + if cache_adjust: + self.out.emit(f"JUMPBY({cache_adjust});") + + @contextlib.contextmanager + def wrap_super_or_macro(self, up: SuperOrMacroInstruction): + """Shared boilerplate for super- and macro instructions.""" + # TODO: Somewhere (where?) make it so that if one instruction + # has an output that is input to another, and the variable names + # and types match and don't conflict with other instructions, + # that variable is declared with the right name and type in the + # outer block, rather than trusting the compiler to optimize it. 
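+        # Rough shape of the emitted code (a sketch, not verbatim output):
+        #     TARGET(NAME) {
+        #         PyObject *_tmp_1 = PEEK(1); ...   // one per stack slot
+        #         <component bodies>
+        #         STACK_GROW(...) or STACK_SHRINK(...) as needed;
+        #         POKE(1, _tmp_1); ...
+        #         DISPATCH();
+        #     }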
+ self.out.emit("") + with self.out.block(f"TARGET({up.name})"): + for i, var in reversed(list(enumerate(up.stack))): + src = None + if i < up.initial_sp: + src = StackEffect(f"PEEK({up.initial_sp - i})", "") + self.out.declare(var, src) + + yield + + self.out.stack_adjust(up.final_sp - up.initial_sp) + for i, var in enumerate(reversed(up.stack[: up.final_sp]), 1): + dst = StackEffect(f"PEEK({i})", "") + self.out.assign(dst, var) + + self.out.emit(f"DISPATCH();") + + +def extract_block_text(block: parser.Block) -> tuple[list[str], list[str]]: + # Get lines of text with proper dedent + blocklines = block.text.splitlines(True) + + # Remove blank lines from both ends + while blocklines and not blocklines[0].strip(): + blocklines.pop(0) + while blocklines and not blocklines[-1].strip(): + blocklines.pop() + + # Remove leading and trailing braces + assert blocklines and blocklines[0].strip() == "{" + assert blocklines and blocklines[-1].strip() == "}" + blocklines.pop() + blocklines.pop(0) + + # Remove trailing blank lines + while blocklines and not blocklines[-1].strip(): + blocklines.pop() + + # Separate PREDICT(...) macros from end + predictions: list[str] = [] + while blocklines and (m := re.match(r"^\s*PREDICT\((\w+)\);\s*$", blocklines[-1])): + predictions.insert(0, m.group(1)) + blocklines.pop() + + return blocklines, predictions + + +def always_exits(lines: list[str]) -> bool: + """Determine whether a block always ends in a return/goto/etc.""" + if not lines: + return False + line = lines[-1].rstrip() + # Indent must match exactly (TODO: Do something better) + if line[:12] != " " * 12: + return False + line = line[12:] + return line.startswith( + ("goto ", "return ", "DISPATCH", "GO_TO_", "Py_UNREACHABLE()") + ) + + +def main(): + """Parse command line, parse input, analyze, write output.""" + args = arg_parser.parse_args() # Prints message and sys.exit(2) on error + a = Analyzer(args.input, args.output) # Raises OSError if input unreadable + a.parse() # Raises SyntaxError on failure + a.analyze() # Prints messages and sets a.errors on failure + if a.errors: + sys.exit(f"Found {a.errors} errors") + a.write_instructions() # Raises OSError if output can't be written + + +if __name__ == "__main__": + main() diff --git a/Tools/cases_generator/lexer.py b/Tools/cases_generator/lexer.py new file mode 100644 index 00000000000000..39b6a212a67b1c --- /dev/null +++ b/Tools/cases_generator/lexer.py @@ -0,0 +1,262 @@ +# Parser for C code +# Originally by Mark Shannon (mark@hotpy.org) +# https://gist.github.com/markshannon/db7ab649440b5af765451bb77c7dba34 + +import re +import sys +import collections +from dataclasses import dataclass + +def choice(*opts): + return "|".join("(%s)" % opt for opt in opts) + +# Regexes + +# Longer operators must go before shorter ones. + +PLUSPLUS = r'\+\+' +MINUSMINUS = r'--' + +# -> +ARROW = r'->' +ELLIPSIS = r'\.\.\.' + +# Assignment operators +TIMESEQUAL = r'\*=' +DIVEQUAL = r'/=' +MODEQUAL = r'%=' +PLUSEQUAL = r'\+=' +MINUSEQUAL = r'-=' +LSHIFTEQUAL = r'<<=' +RSHIFTEQUAL = r'>>=' +ANDEQUAL = r'&=' +OREQUAL = r'\|=' +XOREQUAL = r'\^=' + +# Operators +PLUS = r'\+' +MINUS = r'-' +TIMES = r'\*' +DIVIDE = r'/' +MOD = r'%' +NOT = r'~' +XOR = r'\^' +LOR = r'\|\|' +LAND = r'&&' +LSHIFT = r'<<' +RSHIFT = r'>>' +LE = r'<=' +GE = r'>=' +EQ = r'==' +NE = r'!=' +LT = r'<' +GT = r'>' +LNOT = r'!' +OR = r'\|' +AND = r'&' +EQUALS = r'=' + +# ? +CONDOP = r'\?' 
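+
+# Note: every UPPERCASE regex name above and below is collected into the
+# `operators` mapping further down, and each name is then rebound to itself,
+# so that e.g. PLUS == 'PLUS' can double as a token kind.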
+ +# Delimiters +LPAREN = r'\(' +RPAREN = r'\)' +LBRACKET = r'\[' +RBRACKET = r'\]' +LBRACE = r'\{' +RBRACE = r'\}' +COMMA = r',' +PERIOD = r'\.' +SEMI = r';' +COLON = r':' +BACKSLASH = r'\\' + +operators = { op: pattern for op, pattern in globals().items() if op == op.upper() } +for op in operators: + globals()[op] = op +opmap = { pattern.replace("\\", "") or '\\' : op for op, pattern in operators.items() } + +# Macros +macro = r'# *(ifdef|ifndef|undef|define|error|endif|if|else|include|#)' +MACRO = 'MACRO' + +id_re = r'[a-zA-Z_][0-9a-zA-Z_]*' +IDENTIFIER = 'IDENTIFIER' + +suffix = r'([uU]?[lL]?[lL]?)' +octal = r'0[0-7]+' + suffix +hex = r'0[xX][0-9a-fA-F]+' +decimal_digits = r'(0|[1-9][0-9]*)' +decimal = decimal_digits + suffix + + +exponent = r"""([eE][-+]?[0-9]+)""" +fraction = r"""([0-9]*\.[0-9]+)|([0-9]+\.)""" +float = '(((('+fraction+')'+exponent+'?)|([0-9]+'+exponent+'))[FfLl]?)' + +number_re = choice(octal, hex, float, decimal) +NUMBER = 'NUMBER' + +simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])""" +decimal_escape = r"""(\d+)""" +hex_escape = r"""(x[0-9a-fA-F]+)""" +escape_sequence = r"""(\\("""+simple_escape+'|'+decimal_escape+'|'+hex_escape+'))' +string_char = r"""([^"\\\n]|"""+escape_sequence+')' +str_re = '"'+string_char+'*"' +STRING = 'STRING' +char = r'\'.\'' # TODO: escape sequence +CHARACTER = 'CHARACTER' + +comment_re = r'//.*|/\*([^*]|\*[^/])*\*/' +COMMENT = 'COMMENT' + +newline = r"\n" +invalid = r"\S" # A single non-space character that's not caught by any of the other patterns +matcher = re.compile(choice(id_re, number_re, str_re, char, newline, macro, comment_re, *operators.values(), invalid)) +letter = re.compile(r'[a-zA-Z_]') + +kwds = ( + 'AUTO', 'BREAK', 'CASE', 'CHAR', 'CONST', + 'CONTINUE', 'DEFAULT', 'DO', 'DOUBLE', 'ELSE', 'ENUM', 'EXTERN', + 'FLOAT', 'FOR', 'GOTO', 'IF', 'INLINE', 'INT', 'LONG', + 'REGISTER', 'OFFSETOF', + 'RESTRICT', 'RETURN', 'SHORT', 'SIGNED', 'SIZEOF', 'STATIC', 'STRUCT', + 'SWITCH', 'TYPEDEF', 'UNION', 'UNSIGNED', 'VOID', + 'VOLATILE', 'WHILE' +) +for name in kwds: + globals()[name] = name +keywords = { name.lower() : name for name in kwds } + + +def make_syntax_error( + message: str, filename: str, line: int, column: int, line_text: str, +) -> SyntaxError: + return SyntaxError(message, (filename, line, column, line_text)) + + +@dataclass(slots=True) +class Token: + kind: str + text: str + begin: tuple[int, int] + end: tuple[int, int] + + @property + def line(self): + return self.begin[0] + + @property + def column(self): + return self.begin[1] + + @property + def end_line(self): + return self.end[0] + + @property + def end_column(self): + return self.end[1] + + @property + def width(self): + return self.end[1] - self.begin[1] + + def replaceText(self, txt): + assert isinstance(txt, str) + return Token(self.kind, txt, self.begin, self.end) + + def __repr__(self): + b0, b1 = self.begin + e0, e1 = self.end + if b0 == e0: + return f"{self.kind}({self.text!r}, {b0}:{b1}:{e1})" + else: + return f"{self.kind}({self.text!r}, {b0}:{b1}, {e0}:{e1})" + + +def tokenize(src, line=1, filename=None): + linestart = -1 + for m in matcher.finditer(src): + start, end = m.span() + text = m.group(0) + if text in keywords: + kind = keywords[text] + elif letter.match(text): + kind = IDENTIFIER + elif text == '...': + kind = ELLIPSIS + elif text == '.': + kind = PERIOD + elif text[0] in '0123456789.': + kind = NUMBER + elif text[0] == '"': + kind = STRING + elif text in opmap: + kind = opmap[text] + elif text == '\n': + linestart = start + line += 1 + kind = 
'\n' + elif text[0] == "'": + kind = CHARACTER + elif text[0] == '#': + kind = MACRO + elif text[0] == '/' and text[1] in '/*': + kind = COMMENT + else: + lineend = src.find("\n", start) + if lineend == -1: + lineend = len(src) + raise make_syntax_error(f"Bad token: {text}", + filename, line, start-linestart+1, src[linestart:lineend]) + if kind == COMMENT: + begin = line, start-linestart + newlines = text.count('\n') + if newlines: + linestart = start + text.rfind('\n') + line += newlines + else: + begin = line, start-linestart + if kind != "\n": + yield Token(kind, text, begin, (line, start-linestart+len(text))) + + +__all__ = [] +__all__.extend([kind for kind in globals() if kind.upper() == kind]) + + +def to_text(tkns: list[Token], dedent: int = 0) -> str: + res: list[str] = [] + line, col = -1, 1+dedent + for tkn in tkns: + if line == -1: + line, _ = tkn.begin + l, c = tkn.begin + #assert(l >= line), (line, txt, start, end) + while l > line: + line += 1 + res.append('\n') + col = 1+dedent + res.append(' '*(c-col)) + text = tkn.text + if dedent != 0 and tkn.kind == 'COMMENT' and '\n' in text: + if dedent < 0: + text = text.replace('\n', '\n' + ' '*-dedent) + # TODO: dedent > 0 + res.append(text) + line, col = tkn.end + return ''.join(res) + + +if __name__ == "__main__": + import sys + filename = sys.argv[1] + if filename == "-c": + src = sys.argv[2] + else: + src = open(filename).read() + # print(to_text(tokenize(src))) + for tkn in tokenize(src, filename=filename): + print(tkn) diff --git a/Tools/cases_generator/parser.py b/Tools/cases_generator/parser.py new file mode 100644 index 00000000000000..06a4f5fb35261e --- /dev/null +++ b/Tools/cases_generator/parser.py @@ -0,0 +1,369 @@ +"""Parser for bytecodes.inst.""" + +from dataclasses import dataclass, field +from typing import NamedTuple, Callable, TypeVar, Literal + +import lexer as lx +from plexer import PLexer + + +P = TypeVar("P", bound="Parser") +N = TypeVar("N", bound="Node") + + +def contextual(func: Callable[[P], N | None]) -> Callable[[P], N | None]: + # Decorator to wrap grammar methods. + # Resets position if `func` returns None. 
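+    # This is what makes the parser's backtracking cheap to express: a
+    # grammar method can simply return None on a failed match, the wrapper
+    # rewinds the token position, and on success it attaches a
+    # Context(begin, end, owner) to the returned node.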
+ def contextual_wrapper(self: P) -> N | None: + begin = self.getpos() + res = func(self) + if res is None: + self.setpos(begin) + return + end = self.getpos() + res.context = Context(begin, end, self) + return res + + return contextual_wrapper + + +class Context(NamedTuple): + begin: int + end: int + owner: PLexer + + def __repr__(self): + return f"<{self.begin}-{self.end}>" + + +@dataclass +class Node: + context: Context | None = field(init=False, default=None) + + @property + def text(self) -> str: + return self.to_text() + + def to_text(self, dedent: int = 0) -> str: + context = self.context + if not context: + return "" + tokens = context.owner.tokens + begin = context.begin + end = context.end + return lx.to_text(tokens[begin:end], dedent) + + +@dataclass +class Block(Node): + tokens: list[lx.Token] + + +@dataclass +class StackEffect(Node): + name: str + type: str = "" + # TODO: array, condition + + +@dataclass +class CacheEffect(Node): + name: str + size: int + + +@dataclass +class OpName(Node): + name: str + + +InputEffect = StackEffect | CacheEffect +OutputEffect = StackEffect +UOp = OpName | CacheEffect + + +@dataclass +class InstHeader(Node): + register: bool + kind: Literal["inst", "op"] + name: str + inputs: list[InputEffect] + outputs: list[OutputEffect] + + +@dataclass +class InstDef(Node): + register: bool + kind: Literal["inst", "op"] + name: str + inputs: list[InputEffect] + outputs: list[OutputEffect] + block: Block + + +@dataclass +class Super(Node): + name: str + ops: list[OpName] + + +@dataclass +class Macro(Node): + name: str + uops: list[UOp] + + +@dataclass +class Family(Node): + name: str + size: str # Variable giving the cache size in code units + members: list[str] + + +class Parser(PLexer): + @contextual + def definition(self) -> InstDef | Super | Macro | Family | None: + if inst := self.inst_def(): + return inst + if super := self.super_def(): + return super + if macro := self.macro_def(): + return macro + if family := self.family_def(): + return family + + @contextual + def inst_def(self) -> InstDef | None: + if hdr := self.inst_header(): + if block := self.block(): + return InstDef(hdr.register, hdr.kind, hdr.name, hdr.inputs, hdr.outputs, block) + raise self.make_syntax_error("Expected block") + return None + + @contextual + def inst_header(self) -> InstHeader | None: + # inst(NAME) + # | [register] inst(NAME, (inputs -- outputs)) + # | [register] op(NAME, (inputs -- outputs)) + # TODO: Make INST a keyword in the lexer. + register = bool(self.expect(lx.REGISTER)) + if (tkn := self.expect(lx.IDENTIFIER)) and (kind := tkn.text) in ("inst", "op"): + if self.expect(lx.LPAREN) and (tkn := self.expect(lx.IDENTIFIER)): + name = tkn.text + if self.expect(lx.COMMA): + inp, outp = self.io_effect() + if self.expect(lx.RPAREN): + if (tkn := self.peek()) and tkn.kind == lx.LBRACE: + return InstHeader(register, kind, name, inp, outp) + elif self.expect(lx.RPAREN) and kind == "inst": + # No legacy stack effect if kind is "op". 
+ return InstHeader(register, kind, name, [], []) + return None + + def io_effect(self) -> tuple[list[InputEffect], list[OutputEffect]]: + # '(' [inputs] '--' [outputs] ')' + if self.expect(lx.LPAREN): + inputs = self.inputs() or [] + if self.expect(lx.MINUSMINUS): + outputs = self.outputs() or [] + if self.expect(lx.RPAREN): + return inputs, outputs + raise self.make_syntax_error("Expected stack effect") + + def inputs(self) -> list[InputEffect] | None: + # input (',' input)* + here = self.getpos() + if inp := self.input(): + near = self.getpos() + if self.expect(lx.COMMA): + if rest := self.inputs(): + return [inp] + rest + self.setpos(near) + return [inp] + self.setpos(here) + return None + + @contextual + def input(self) -> InputEffect | None: + return self.cache_effect() or self.stack_effect() + + def outputs(self) -> list[OutputEffect] | None: + # output (, output)* + here = self.getpos() + if outp := self.output(): + near = self.getpos() + if self.expect(lx.COMMA): + if rest := self.outputs(): + return [outp] + rest + self.setpos(near) + return [outp] + self.setpos(here) + return None + + @contextual + def output(self) -> OutputEffect | None: + return self.stack_effect() + + @contextual + def cache_effect(self) -> CacheEffect | None: + # IDENTIFIER '/' NUMBER + if tkn := self.expect(lx.IDENTIFIER): + if self.expect(lx.DIVIDE): + num = self.require(lx.NUMBER).text + try: + size = int(num) + except ValueError: + raise self.make_syntax_error(f"Expected integer, got {num!r}") + else: + return CacheEffect(tkn.text, size) + + @contextual + def stack_effect(self) -> StackEffect | None: + # IDENTIFIER [':' IDENTIFIER] + # TODO: Arrays, conditions + if tkn := self.expect(lx.IDENTIFIER): + type = "" + if self.expect(lx.COLON): + type = self.require(lx.IDENTIFIER).text + return StackEffect(tkn.text, type) + + @contextual + def super_def(self) -> Super | None: + if (tkn := self.expect(lx.IDENTIFIER)) and tkn.text == "super": + if self.expect(lx.LPAREN): + if tkn := self.expect(lx.IDENTIFIER): + if self.expect(lx.RPAREN): + if self.expect(lx.EQUALS): + if ops := self.ops(): + self.require(lx.SEMI) + res = Super(tkn.text, ops) + return res + + def ops(self) -> list[OpName] | None: + if op := self.op(): + ops = [op] + while self.expect(lx.PLUS): + if op := self.op(): + ops.append(op) + return ops + + @contextual + def op(self) -> OpName | None: + if tkn := self.expect(lx.IDENTIFIER): + return OpName(tkn.text) + + @contextual + def macro_def(self) -> Macro | None: + if (tkn := self.expect(lx.IDENTIFIER)) and tkn.text == "macro": + if self.expect(lx.LPAREN): + if tkn := self.expect(lx.IDENTIFIER): + if self.expect(lx.RPAREN): + if self.expect(lx.EQUALS): + if uops := self.uops(): + self.require(lx.SEMI) + res = Macro(tkn.text, uops) + return res + + def uops(self) -> list[UOp] | None: + if uop := self.uop(): + uops = [uop] + while self.expect(lx.PLUS): + if uop := self.uop(): + uops.append(uop) + else: + raise self.make_syntax_error("Expected op name or cache effect") + return uops + + @contextual + def uop(self) -> UOp | None: + if tkn := self.expect(lx.IDENTIFIER): + if self.expect(lx.DIVIDE): + if num := self.expect(lx.NUMBER): + try: + size = int(num.text) + except ValueError: + raise self.make_syntax_error( + f"Expected integer, got {num.text!r}" + ) + else: + return CacheEffect(tkn.text, size) + raise self.make_syntax_error("Expected integer") + else: + return OpName(tkn.text) + + @contextual + def family_def(self) -> Family | None: + if (tkn := self.expect(lx.IDENTIFIER)) and tkn.text == 
"family": + size = None + if self.expect(lx.LPAREN): + if tkn := self.expect(lx.IDENTIFIER): + if self.expect(lx.COMMA): + if not (size := self.expect(lx.IDENTIFIER)): + raise self.make_syntax_error("Expected identifier") + if self.expect(lx.RPAREN): + if self.expect(lx.EQUALS): + if not self.expect(lx.LBRACE): + raise self.make_syntax_error("Expected {") + if members := self.members(): + if self.expect(lx.RBRACE) and self.expect(lx.SEMI): + return Family( + tkn.text, size.text if size else "", members + ) + return None + + def members(self) -> list[str] | None: + here = self.getpos() + if tkn := self.expect(lx.IDENTIFIER): + members = [tkn.text] + while self.expect(lx.COMMA): + if tkn := self.expect(lx.IDENTIFIER): + members.append(tkn.text) + else: + break + peek = self.peek() + if not peek or peek.kind != lx.RBRACE: + raise self.make_syntax_error("Expected comma or right paren") + return members + self.setpos(here) + return None + + @contextual + def block(self) -> Block: + tokens = self.c_blob() + return Block(tokens) + + def c_blob(self) -> list[lx.Token]: + tokens: list[lx.Token] = [] + level = 0 + while tkn := self.next(raw=True): + if tkn.kind in (lx.LBRACE, lx.LPAREN, lx.LBRACKET): + level += 1 + elif tkn.kind in (lx.RBRACE, lx.RPAREN, lx.RBRACKET): + level -= 1 + if level <= 0: + break + tokens.append(tkn) + return tokens + + +if __name__ == "__main__": + import sys + + if sys.argv[1:]: + filename = sys.argv[1] + if filename == "-c" and sys.argv[2:]: + src = sys.argv[2] + filename = "<string>" + else: + with open(filename) as f: + src = f.read() + srclines = src.splitlines() + begin = srclines.index("// BEGIN BYTECODES //") + end = srclines.index("// END BYTECODES //") + src = "\n".join(srclines[begin + 1 : end]) + else: + filename = "<default>" + src = "if (x) { x.foo; // comment\n}" + parser = Parser(src, filename) + x = parser.definition() + print(x) diff --git a/Tools/cases_generator/plexer.py b/Tools/cases_generator/plexer.py new file mode 100644 index 00000000000000..a73254ed5b1daa --- /dev/null +++ b/Tools/cases_generator/plexer.py @@ -0,0 +1,105 @@ +import lexer as lx +Token = lx.Token + + +class PLexer: + def __init__(self, src: str, filename: str): + self.src = src + self.filename = filename + self.tokens = list(lx.tokenize(self.src, filename=filename)) + self.pos = 0 + + def getpos(self) -> int: + # Current position + return self.pos + + def eof(self) -> bool: + # Are we at EOF? + return self.pos >= len(self.tokens) + + def setpos(self, pos: int) -> None: + # Reset position + assert 0 <= pos <= len(self.tokens), (pos, len(self.tokens)) + self.pos = pos + + def backup(self) -> None: + # Back up position by 1 + assert self.pos > 0 + self.pos -= 1 + + def next(self, raw: bool = False) -> Token | None: + # Return next token and advance position; None if at EOF + # TODO: Return synthetic EOF token instead of None? 
+ while self.pos < len(self.tokens): + tok = self.tokens[self.pos] + self.pos += 1 + if raw or tok.kind != "COMMENT": + return tok + return None + + def peek(self, raw: bool = False) -> Token | None: + # Return next token without advancing position + tok = self.next(raw=raw) + self.backup() + return tok + + def maybe(self, kind: str, raw: bool = False) -> Token | None: + # Return next token without advancing position if kind matches + tok = self.peek(raw=raw) + if tok and tok.kind == kind: + return tok + return None + + def expect(self, kind: str) -> Token | None: + # Return next token and advance position if kind matches + tkn = self.next() + if tkn is not None: + if tkn.kind == kind: + return tkn + self.backup() + return None + + def require(self, kind: str) -> Token: + # Return next token and advance position, requiring kind to match + tkn = self.next() + if tkn is not None and tkn.kind == kind: + return tkn + raise self.make_syntax_error(f"Expected {kind!r} but got {tkn and tkn.text!r}", tkn) + + def extract_line(self, lineno: int) -> str: + # Return source line `lineno` (1-based) + lines = self.src.splitlines() + if lineno > len(lines): + return "" + return lines[lineno - 1] + + def make_syntax_error(self, message: str, tkn: Token|None = None) -> SyntaxError: + # Construct a SyntaxError instance from message and token + if tkn is None: + tkn = self.peek() + if tkn is None: + tkn = self.tokens[-1] + return lx.make_syntax_error(message, + self.filename, tkn.line, tkn.column, self.extract_line(tkn.line)) + + +if __name__ == "__main__": + import sys + if sys.argv[1:]: + filename = sys.argv[1] + if filename == "-c" and sys.argv[2:]: + src = sys.argv[2] + filename = "<string>" + else: + with open(filename) as f: + src = f.read() + else: + filename = "<default>" + src = "if (x) { x.foo; // comment\n}" + p = PLexer(src, filename) + while not p.eof(): + tok = p.next(raw=True) + assert tok + left = repr(tok) + right = lx.to_text([tok]).rstrip() + print(f"{left:40.40} {right}") diff --git a/Tools/clinic/clinic.py b/Tools/clinic/clinic.py index a8687e3470a185..fdf8041e14bbc1 100755 --- a/Tools/clinic/clinic.py +++ b/Tools/clinic/clinic.py @@ -348,6 +348,12 @@ def __init__(self): # "goto exit" if there are any. self.return_conversion = [] + # The C statements required to do some operations + # after the end of parsing but before cleaning up. + # These operations may be, for example, memory deallocations which + # can only be done without any error happening during argument parsing. + self.post_parsing = [] + # The C statements required to clean up after the impl call. 
self.cleanup = [] @@ -713,7 +719,7 @@ def output_templates(self, f): vararg = NO_VARARG pos_only = min_pos = max_pos = min_kw_only = pseudo_args = 0 for i, p in enumerate(parameters, 1): - if p.is_keyword_only() or vararg != NO_VARARG: + if p.is_keyword_only(): assert not p.is_positional_only() if not p.is_optional(): min_kw_only = i - max_pos @@ -820,6 +826,7 @@ def parser_body(prototype, *fields, declarations=''): {modifications} {return_value} = {c_basename}_impl({impl_arguments}); {return_conversion} + {post_parsing} {exit_label} {cleanup} @@ -956,7 +963,7 @@ def parser_body(prototype, *fields, declarations=''): parser_code.append(normalize_snippet(""" %s = PyTuple_New(%s); for (Py_ssize_t i = 0; i < %s; ++i) {{ - PyTuple_SET_ITEM(%s, i, args[%d + i]); + PyTuple_SET_ITEM(%s, i, Py_NewRef(args[%d + i])); }} """ % ( p.converter.parser_name, @@ -1009,13 +1016,14 @@ def parser_body(prototype, *fields, declarations=''): parser_definition = parser_body(parser_prototype, *parser_code) else: - has_optional_kw = (max(pos_only, min_pos) + min_kw_only < len(converters)) + has_optional_kw = (max(pos_only, min_pos) + min_kw_only < len(converters) - int(vararg != NO_VARARG)) if vararg == NO_VARARG: args_declaration = "_PyArg_UnpackKeywords", "%s, %s, %s" % ( min_pos, max_pos, min_kw_only ) + nargs = "nargs" else: args_declaration = "_PyArg_UnpackKeywordsWithVararg", "%s, %s, %s, %s" % ( min_pos, @@ -1023,6 +1031,7 @@ def parser_body(prototype, *fields, declarations=''): min_kw_only, vararg ) + nargs = f"Py_MIN(nargs, {max_pos})" if max_pos else "0" if not new_or_init: flags = "METH_FASTCALL|METH_KEYWORDS" parser_prototype = parser_prototype_fastcall_keywords @@ -1030,8 +1039,7 @@ def parser_body(prototype, *fields, declarations=''): declarations = declare_parser(f) declarations += "\nPyObject *argsbuf[%s];" % len(converters) if has_optional_kw: - pre_buffer = "0" if vararg != NO_VARARG else "nargs" - declarations += "\nPy_ssize_t noptargs = %s + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - %d;" % (pre_buffer, min_pos + min_kw_only) + declarations += "\nPy_ssize_t noptargs = %s + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - %d;" % (nargs, min_pos + min_kw_only) parser_code = [normalize_snippet(""" args = %s(args, nargs, NULL, kwnames, &_parser, %s, argsbuf); if (!args) {{ @@ -1048,7 +1056,7 @@ def parser_body(prototype, *fields, declarations=''): declarations += "\nPyObject * const *fastargs;" declarations += "\nPy_ssize_t nargs = PyTuple_GET_SIZE(args);" if has_optional_kw: - declarations += "\nPy_ssize_t noptargs = nargs + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - %d;" % (min_pos + min_kw_only) + declarations += "\nPy_ssize_t noptargs = %s + (kwargs ? 
PyDict_GET_SIZE(kwargs) : 0) - %d;" % (nargs, min_pos + min_kw_only) parser_code = [normalize_snippet(""" fastargs = %s(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, %s, argsbuf); if (!fastargs) {{ @@ -1460,6 +1468,7 @@ def render_function(self, clinic, f): template_dict['impl_parameters'] = ", ".join(data.impl_parameters) template_dict['impl_arguments'] = ", ".join(data.impl_arguments) template_dict['return_conversion'] = format_escape("".join(data.return_conversion).rstrip()) + template_dict['post_parsing'] = format_escape("".join(data.post_parsing).rstrip()) template_dict['cleanup'] = format_escape("".join(data.cleanup)) template_dict['return_value'] = data.return_value @@ -1484,6 +1493,7 @@ def render_function(self, clinic, f): return_conversion=template_dict['return_conversion'], initializers=template_dict['initializers'], modifications=template_dict['modifications'], + post_parsing=template_dict['post_parsing'], cleanup=template_dict['cleanup'], ) @@ -2725,6 +2735,10 @@ def _render_non_self(self, parameter, data): # parse_arguments self.parse_argument(data.parse_arguments) + # post_parsing + if post_parsing := self.post_parsing(): + data.post_parsing.append('/* Post parse cleanup for ' + name + ' */\n' + post_parsing.rstrip() + '\n') + # cleanup cleanup = self.cleanup() if cleanup: @@ -2820,6 +2834,14 @@ def modify(self): """ return "" + def post_parsing(self): + """ + The C statements required to do some operations after the end of parsing but before cleaning up. + Return a string containing this code indented at column 0. + If no operation is necessary, return an empty string. + """ + return "" + def cleanup(self): """ The C statements required to clean up after this variable. @@ -3416,10 +3438,10 @@ def converter_init(self, *, accept={str}, encoding=None, zeroes=False): if NoneType in accept and self.c_default == "Py_None": self.c_default = "NULL" - def cleanup(self): + def post_parsing(self): if self.encoding: name = self.name - return "".join(["if (", name, ") {\n PyMem_FREE(", name, ");\n}\n"]) + return f"PyMem_FREE({name});\n" def parse_arg(self, argname, displayname): if self.format_unit == 's': @@ -5190,10 +5212,6 @@ def state_terminal(self, line): def main(argv): import sys - - if sys.version_info.major < 3 or sys.version_info.minor < 3: - sys.exit("Error: clinic.py requires Python 3.3 or greater.") - import argparse cmdline = argparse.ArgumentParser( description="""Preprocessor for CPython C files. 
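# Editor's illustration, not part of the patch: a minimal sketch of a Clinic
# converter using the post_parsing() hook added above. The converter name is
# hypothetical and the snippet assumes it lives in Tools/clinic/clinic.py next
# to the other CConverter subclasses; its body mirrors the updated
# str_converter, which now frees its encoded buffer in post_parsing() instead
# of cleanup().
class example_encoded_str_converter(CConverter):
    type = 'char *'

    def post_parsing(self):
        # Rendered between {return_conversion} and {exit_label} in the
        # generated parser, so it is skipped when argument parsing fails
        # and control jumps straight to the exit label.
        return f"PyMem_FREE({self.name});\n"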
diff --git a/Tools/gdb/libpython.py b/Tools/gdb/libpython.py index 303409cb0077d1..c003c1ab4a23bd 100755 --- a/Tools/gdb/libpython.py +++ b/Tools/gdb/libpython.py @@ -82,6 +82,8 @@ def _sizeof_void_p(): Py_TPFLAGS_BASE_EXC_SUBCLASS = (1 << 30) Py_TPFLAGS_TYPE_SUBCLASS = (1 << 31) +#From pycore_frame.h +FRAME_OWNED_BY_CSTACK = 3 MAX_OUTPUT_LEN=1024 @@ -1077,8 +1079,8 @@ def _f_lasti(self): first_instr = self._f_code().field("co_code_adaptive").cast(codeunit_p) return int(prev_instr - first_instr) - def is_entry(self): - return self._f_special("is_entry", bool) + def is_shim(self): + return self._f_special("owner", int) == FRAME_OWNED_BY_CSTACK def previous(self): return self._f_special("previous", PyFramePtr) @@ -1821,14 +1823,14 @@ def print_summary(self): interp_frame = self.get_pyop() while True: if interp_frame: + if interp_frame.is_shim(): + break line = interp_frame.get_truncated_repr(MAX_OUTPUT_LEN) sys.stdout.write('#%i %s\n' % (self.get_index(), line)) if not interp_frame.is_optimized_out(): line = interp_frame.current_line() if line is not None: sys.stdout.write(' %s\n' % line.strip()) - if interp_frame.is_entry(): - break else: sys.stdout.write('#%i (unable to read python frame information)\n' % self.get_index()) break @@ -1845,13 +1847,13 @@ def print_traceback(self): interp_frame = self.get_pyop() while True: if interp_frame: + if interp_frame.is_shim(): + break interp_frame.print_traceback() if not interp_frame.is_optimized_out(): line = interp_frame.current_line() if line is not None: sys.stdout.write(' %s\n' % line.strip()) - if interp_frame.is_entry(): - break else: sys.stdout.write(' (unable to read python frame information)\n') break @@ -2106,6 +2108,8 @@ def invoke(self, args, from_tty): while True: if not pyop_frame: print(UNABLE_READ_INFO_PYTHON_FRAME) + if pyop_frame.is_shim(): + break sys.stdout.write('Locals for %s\n' % (pyop_frame.co_name.proxyval(set()))) @@ -2114,8 +2118,6 @@ def invoke(self, args, from_tty): % (pyop_name.proxyval(set()), pyop_value.get_truncated_repr(MAX_OUTPUT_LEN))) - if pyop_frame.is_entry(): - break pyop_frame = pyop_frame.previous() diff --git a/Tools/msi/test/test_files.wxs b/Tools/msi/test/test_files.wxs index 9127ce894819e5..b5f68faef30e02 100644 --- a/Tools/msi/test/test_files.wxs +++ b/Tools/msi/test/test_files.wxs @@ -1,6 +1,6 @@ <?xml version="1.0" encoding="UTF-8"?> <Wix xmlns="http://schemas.microsoft.com/wix/2006/wi"> - <?define exts=_testcapi;_ctypes_test;_testbuffer;_testimportmultiple;_testmultiphase;_testconsole;_testinternalcapi ?> + <?define exts=_testcapi;_ctypes_test;_testbuffer;_testimportmultiple;_testmultiphase;_testsinglephase;_testconsole;_testinternalcapi ?> <Fragment> <ComponentGroup Id="test_extensions"> <?foreach ext in $(var.exts)?> diff --git a/Tools/peg_generator/peg_extension/peg_extension.c b/Tools/peg_generator/peg_extension/peg_extension.c index 3ebb7bdd9b38c9..7df134b5ade8bb 100644 --- a/Tools/peg_generator/peg_extension/peg_extension.c +++ b/Tools/peg_generator/peg_extension/peg_extension.c @@ -12,8 +12,7 @@ _build_return_object(mod_ty module, int mode, PyObject *filename_ob, PyArena *ar } else if (mode == 1) { result = PyAST_mod2obj(module); } else { - result = Py_None; - Py_INCREF(result); + result = Py_NewRef(Py_None); } return result; diff --git a/Tools/scripts/summarize_stats.py b/Tools/scripts/summarize_stats.py index 2e8261a4755b99..81b06f9f7469ab 100644 --- a/Tools/scripts/summarize_stats.py +++ b/Tools/scripts/summarize_stats.py @@ -2,7 +2,9 @@ default stats folders. 
""" +import argparse import collections +import json import os.path import opcode from datetime import date @@ -30,7 +32,104 @@ opmap = {name: i for i, name in enumerate(opname)} opmap = dict(sorted(opmap.items())) -TOTAL = "specialization.deferred", "specialization.hit", "specialization.miss", "execution_count" +TOTAL = "specialization.hit", "specialization.miss", "execution_count" + +def format_ratio(num, den): + """ + Format a ratio as a percentage. When the denominator is 0, returns the empty + string. + """ + if den == 0: + return "" + else: + return f"{num/den:.01%}" + +def join_rows(a_rows, b_rows): + """ + Joins two tables together, side-by-side, where the first column in each is a + common key. + """ + if len(a_rows) == 0 and len(b_rows) == 0: + return [] + + if len(a_rows): + a_ncols = list(set(len(x) for x in a_rows)) + if len(a_ncols) != 1: + raise ValueError("Table a is ragged") + + if len(b_rows): + b_ncols = list(set(len(x) for x in b_rows)) + if len(b_ncols) != 1: + raise ValueError("Table b is ragged") + + if len(a_rows) and len(b_rows) and a_ncols[0] != b_ncols[0]: + raise ValueError("Tables have different widths") + + if len(a_rows): + ncols = a_ncols[0] + else: + ncols = b_ncols[0] + + default = [""] * (ncols - 1) + a_data = {x[0]: x[1:] for x in a_rows} + b_data = {x[0]: x[1:] for x in b_rows} + + if len(a_data) != len(a_rows) or len(b_data) != len(b_rows): + raise ValueError("Duplicate keys") + + # To preserve ordering, use A's keys as is and then add any in B that aren't + # in A + keys = list(a_data.keys()) + [k for k in b_data.keys() if k not in a_data] + return [(k, *a_data.get(k, default), *b_data.get(k, default)) for k in keys] + +def calculate_specialization_stats(family_stats, total): + rows = [] + for key in sorted(family_stats): + if key.startswith("specialization.failure_kinds"): + continue + if key in ("specialization.hit", "specialization.miss"): + label = key[len("specialization."):] + elif key == "execution_count": + continue + elif key in ("specialization.success", "specialization.failure", "specializable"): + continue + elif key.startswith("pair"): + continue + else: + label = key + rows.append((f"{label:>12}", f"{family_stats[key]:>12}", format_ratio(family_stats[key], total))) + return rows + +def calculate_specialization_success_failure(family_stats): + total_attempts = 0 + for key in ("specialization.success", "specialization.failure"): + total_attempts += family_stats.get(key, 0) + rows = [] + if total_attempts: + for key in ("specialization.success", "specialization.failure"): + label = key[len("specialization."):] + label = label[0].upper() + label[1:] + val = family_stats.get(key, 0) + rows.append((label, val, format_ratio(val, total_attempts))) + return rows + +def calculate_specialization_failure_kinds(name, family_stats, defines): + total_failures = family_stats.get("specialization.failure", 0) + failure_kinds = [ 0 ] * 40 + for key in family_stats: + if not key.startswith("specialization.failure_kind"): + continue + _, index = key[:-1].split("[") + index = int(index) + failure_kinds[index] = family_stats[key] + failures = [(value, index) for (index, value) in enumerate(failure_kinds)] + failures.sort(reverse=True) + rows = [] + for value, index in failures: + if not value: + continue + rows.append((kind_to_text(index, defines, name), value, format_ratio(value, total_failures))) + return rows def print_specialization_stats(name, family_stats, defines): if "specializable" not in family_stats: @@ -39,65 +138,66 @@ def 
print_specialization_stats(name, family_stats, defines): if total == 0: return with Section(name, 3, f"specialization stats for {name} family"): - rows = [] - for key in sorted(family_stats): - if key.startswith("specialization.failure_kinds"): - continue - if key in ("specialization.hit", "specialization.miss"): - label = key[len("specialization."):] - elif key == "execution_count": - label = "unquickened" - elif key in ("specialization.success", "specialization.failure", "specializable"): - continue - elif key.startswith("pair"): - continue - else: - label = key - rows.append((f"{label:>12}", f"{family_stats[key]:>12}", f"{100*family_stats[key]/total:0.1f}%")) + rows = calculate_specialization_stats(family_stats, total) emit_table(("Kind", "Count", "Ratio"), rows) - print_title("Specialization attempts", 4) - total_attempts = 0 - for key in ("specialization.success", "specialization.failure"): - total_attempts += family_stats.get(key, 0) - rows = [] - if total_attempts: - for key in ("specialization.success", "specialization.failure"): - label = key[len("specialization."):] - label = label[0].upper() + label[1:] - val = family_stats.get(key, 0) - rows.append((label, val, f"{100*val/total_attempts:0.1f}%")) + rows = calculate_specialization_success_failure(family_stats) + if rows: + print_title("Specialization attempts", 4) emit_table(("", "Count:", "Ratio:"), rows) - total_failures = family_stats.get("specialization.failure", 0) - failure_kinds = [ 0 ] * 30 - for key in family_stats: - if not key.startswith("specialization.failure_kind"): - continue - _, index = key[:-1].split("[") - index = int(index) - failure_kinds[index] = family_stats[key] - failures = [(value, index) for (index, value) in enumerate(failure_kinds)] - failures.sort(reverse=True) - rows = [] - for value, index in failures: - if not value: - continue - rows.append((kind_to_text(index, defines, name), value, f"{100*value/total_failures:0.1f}%")) - emit_table(("Failure kind", "Count:", "Ratio:"), rows) - -def gather_stats(): - stats = collections.Counter() - for filename in os.listdir(DEFAULT_DIR): - with open(os.path.join(DEFAULT_DIR, filename)) as fd: - for line in fd: - try: - key, value = line.split(":") - except ValueError: - print (f"Unparsable line: '{line.strip()}' in {filename}", file=sys.stderr) - continue - key = key.strip() - value = int(value) - stats[key] += value - return stats + rows = calculate_specialization_failure_kinds(name, family_stats, defines) + emit_table(("Failure kind", "Count:", "Ratio:"), rows) + +def print_comparative_specialization_stats(name, base_family_stats, head_family_stats, defines): + if "specializable" not in base_family_stats: + return + + base_total = sum(base_family_stats.get(kind, 0) for kind in TOTAL) + head_total = sum(head_family_stats.get(kind, 0) for kind in TOTAL) + if base_total + head_total == 0: + return + with Section(name, 3, f"specialization stats for {name} family"): + base_rows = calculate_specialization_stats(base_family_stats, base_total) + head_rows = calculate_specialization_stats(head_family_stats, head_total) + emit_table( + ("Kind", "Base Count", "Base Ratio", "Head Count", "Head Ratio"), + join_rows(base_rows, head_rows) + ) + base_rows = calculate_specialization_success_failure(base_family_stats) + head_rows = calculate_specialization_success_failure(head_family_stats) + rows = join_rows(base_rows, head_rows) + if rows: + print_title("Specialization attempts", 4) + emit_table(("", "Base Count:", "Base Ratio:", "Head Count:", "Head Ratio:"), rows) + 
base_rows = calculate_specialization_failure_kinds(name, base_family_stats, defines) + head_rows = calculate_specialization_failure_kinds(name, head_family_stats, defines) + emit_table( + ("Failure kind", "Base Count:", "Base Ratio:", "Head Count:", "Head Ratio:"), + join_rows(base_rows, head_rows) + ) + +def gather_stats(input): + # Note the output of this function must be JSON-serializable + + if os.path.isfile(input): + with open(input, "r") as fd: + return json.load(fd) + elif os.path.isdir(input): + stats = collections.Counter() + for filename in os.listdir(input): + with open(os.path.join(input, filename)) as fd: + for line in fd: + try: + key, value = line.split(":") + except ValueError: + print(f"Unparsable line: '{line.strip()}' in {filename}", file=sys.stderr) + continue + key = key.strip() + value = int(value) + stats[key] += value + stats['__nfiles__'] += 1 + return stats + else: + raise ValueError(f"{input:r} is not a file or directory path") def extract_opcode_stats(stats): opcode_stats = [ {} for _ in range(256) ] @@ -124,7 +224,7 @@ def pretty(defname): return defname.replace("_", " ").lower() def kind_to_text(kind, defines, opname): - if kind < 7: + if kind <= 7: return pretty(defines[kind][0]) if opname.endswith("ATTR"): opname = "ATTR" @@ -141,10 +241,7 @@ def categorized_counts(opcode_stats): not_specialized = 0 specialized_instructions = { op for op in opcode._specialized_instructions - if "__" not in op and "ADAPTIVE" not in op} - adaptive_instructions = { - op for op in opcode._specialized_instructions - if "ADAPTIVE" in op} + if "__" not in op} for i, opcode_stat in enumerate(opcode_stats): if "execution_count" not in opcode_stat: continue @@ -152,8 +249,6 @@ def categorized_counts(opcode_stats): name = opname[i] if "specializable" in opcode_stat: not_specialized += count - elif name in adaptive_instructions: - not_specialized += count elif name in specialized_instructions: miss = opcode_stat.get("specialization.miss", 0) not_specialized += miss @@ -213,50 +308,98 @@ def emit_table(header, rows): print("|", " | ".join(to_str(i) for i in row), "|") print() +def calculate_execution_counts(opcode_stats, total): + counts = [] + for i, opcode_stat in enumerate(opcode_stats): + if "execution_count" in opcode_stat: + count = opcode_stat['execution_count'] + miss = 0 + if "specializable" not in opcode_stat: + miss = opcode_stat.get("specialization.miss") + counts.append((count, opname[i], miss)) + counts.sort(reverse=True) + cumulative = 0 + rows = [] + for (count, name, miss) in counts: + cumulative += count + if miss: + miss = format_ratio(miss, count) + else: + miss = "" + rows.append((name, count, format_ratio(count, total), + format_ratio(cumulative, total), miss)) + return rows + def emit_execution_counts(opcode_stats, total): with Section("Execution counts", summary="execution counts for all instructions"): - counts = [] - for i, opcode_stat in enumerate(opcode_stats): - if "execution_count" in opcode_stat: - count = opcode_stat['execution_count'] - miss = 0 - if "specializable" not in opcode_stat: - miss = opcode_stat.get("specialization.miss") - counts.append((count, opname[i], miss)) - counts.sort(reverse=True) - cumulative = 0 - rows = [] - for (count, name, miss) in counts: - cumulative += count - if miss: - miss = f"{100*miss/count:0.1f}%" - else: - miss = "" - rows.append((name, count, f"{100*count/total:0.1f}%", - f"{100*cumulative/total:0.1f}%", miss)) + rows = calculate_execution_counts(opcode_stats, total) emit_table( ("Name", "Count:", "Self:", 
"Cumulative:", "Miss ratio:"), rows ) +def emit_comparative_execution_counts( + base_opcode_stats, base_total, head_opcode_stats, head_total +): + with Section("Execution counts", summary="execution counts for all instructions"): + base_rows = calculate_execution_counts(base_opcode_stats, base_total) + head_rows = calculate_execution_counts(head_opcode_stats, head_total) + base_data = dict((x[0], x[1:]) for x in base_rows) + head_data = dict((x[0], x[1:]) for x in head_rows) + opcodes = set(base_data.keys()) | set(head_data.keys()) -def emit_specialization_stats(opcode_stats): + rows = [] + default = [0, "0.0%", "0.0%", 0] + for opcode in opcodes: + base_entry = base_data.get(opcode, default) + head_entry = head_data.get(opcode, default) + if base_entry[0] == 0: + change = 1 + else: + change = (head_entry[0] - base_entry[0]) / base_entry[0] + rows.append( + (opcode, base_entry[0], head_entry[0], + f"{100*change:0.1f}%")) + + rows.sort(key=lambda x: -abs(float(x[-1][:-1]))) + + emit_table( + ("Name", "Base Count:", "Head Count:", "Change:"), + rows + ) + +def get_defines(): spec_path = os.path.join(os.path.dirname(__file__), "../../Python/specialize.c") with open(spec_path) as spec_src: defines = parse_kinds(spec_src) + return defines + +def emit_specialization_stats(opcode_stats): + defines = get_defines() with Section("Specialization stats", summary="specialization stats by family"): for i, opcode_stat in enumerate(opcode_stats): name = opname[i] print_specialization_stats(name, opcode_stat, defines) -def emit_specialization_overview(opcode_stats, total): +def emit_comparative_specialization_stats(base_opcode_stats, head_opcode_stats): + defines = get_defines() + with Section("Specialization stats", summary="specialization stats by family"): + for i, (base_opcode_stat, head_opcode_stat) in enumerate(zip(base_opcode_stats, head_opcode_stats)): + name = opname[i] + print_comparative_specialization_stats(name, base_opcode_stat, head_opcode_stat, defines) + +def calculate_specialization_effectiveness(opcode_stats, total): basic, not_specialized, specialized = categorized_counts(opcode_stats) + return [ + ("Basic", basic, format_ratio(basic, total)), + ("Not specialized", not_specialized, format_ratio(not_specialized, total)), + ("Specialized", specialized, format_ratio(specialized, total)), + ] + +def emit_specialization_overview(opcode_stats, total): with Section("Specialization effectiveness"): - emit_table(("Instructions", "Count:", "Ratio:"), ( - ("Basic", basic, f"{basic*100/total:0.1f}%"), - ("Not specialized", not_specialized, f"{not_specialized*100/total:0.1f}%"), - ("Specialized", specialized, f"{specialized*100/total:0.1f}%"), - )) + rows = calculate_specialization_effectiveness(opcode_stats, total) + emit_table(("Instructions", "Count:", "Ratio:"), rows) for title, field in (("Deferred", "specialization.deferred"), ("Misses", "specialization.miss")): total = 0 counts = [] @@ -267,56 +410,94 @@ def emit_specialization_overview(opcode_stats, total): counts.sort(reverse=True) if total: with Section(f"{title} by instruction", 3): - rows = [ (name, count, f"{100*count/total:0.1f}%") for (count, name) in counts[:10] ] + rows = [ (name, count, format_ratio(count, total)) for (count, name) in counts[:10] ] emit_table(("Name", "Count:", "Ratio:"), rows) -def emit_call_stats(stats): +def emit_comparative_specialization_overview(base_opcode_stats, base_total, head_opcode_stats, head_total): + with Section("Specialization effectiveness"): + base_rows = 
calculate_specialization_effectiveness(base_opcode_stats, base_total) + head_rows = calculate_specialization_effectiveness(head_opcode_stats, head_total) + emit_table( + ("Instructions", "Base Count:", "Base Ratio:", "Head Count:", "Head Ratio:"), + join_rows(base_rows, head_rows) + ) + +def get_stats_defines(): stats_path = os.path.join(os.path.dirname(__file__), "../../Include/pystats.h") with open(stats_path) as stats_src: defines = parse_kinds(stats_src, prefix="EVAL_CALL") + return defines + +def calculate_call_stats(stats): + defines = get_stats_defines() + total = 0 + for key, value in stats.items(): + if "Calls to" in key: + total += value + rows = [] + for key, value in stats.items(): + if "Calls to" in key: + rows.append((key, value, format_ratio(value, total))) + elif key.startswith("Calls "): + name, index = key[:-1].split("[") + index = int(index) + label = name + " (" + pretty(defines[index][0]) + ")" + rows.append((label, value, format_ratio(value, total))) + for key, value in stats.items(): + if key.startswith("Frame"): + rows.append((key, value, format_ratio(value, total))) + return rows + +def emit_call_stats(stats): with Section("Call stats", summary="Inlined calls and frame stats"): - total = 0 - for key, value in stats.items(): - if "Calls to" in key: - total += value - rows = [] - for key, value in stats.items(): - if "Calls to" in key: - rows.append((key, value, f"{100*value/total:0.1f}%")) - elif key.startswith("Calls "): - name, index = key[:-1].split("[") - index = int(index) - label = name + " (" + pretty(defines[index][0]) + ")" - rows.append((label, value, f"{100*value/total:0.1f}%")) - for key, value in stats.items(): - if key.startswith("Frame"): - rows.append((key, value, f"{100*value/total:0.1f}%")) + rows = calculate_call_stats(stats) emit_table(("", "Count:", "Ratio:"), rows) +def emit_comparative_call_stats(base_stats, head_stats): + with Section("Call stats", summary="Inlined calls and frame stats"): + base_rows = calculate_call_stats(base_stats) + head_rows = calculate_call_stats(head_stats) + rows = join_rows(base_rows, head_rows) + rows.sort(key=lambda x: -float(x[-1][:-1])) + emit_table( + ("", "Base Count:", "Base Ratio:", "Head Count:", "Head Ratio:"), + rows + ) + +def calculate_object_stats(stats): + total_materializations = stats.get("Object new values") + total_allocations = stats.get("Object allocations") + stats.get("Object allocations from freelist") + total_increfs = stats.get("Object interpreter increfs") + stats.get("Object increfs") + total_decrefs = stats.get("Object interpreter decrefs") + stats.get("Object decrefs") + rows = [] + for key, value in stats.items(): + if key.startswith("Object"): + if "materialize" in key: + ratio = format_ratio(value, total_materializations) + elif "allocations" in key: + ratio = format_ratio(value, total_allocations) + elif "increfs" in key: + ratio = format_ratio(value, total_increfs) + elif "decrefs" in key: + ratio = format_ratio(value, total_decrefs) + else: + ratio = "" + label = key[6:].strip() + label = label[0].upper() + label[1:] + rows.append((label, value, ratio)) + return rows + def emit_object_stats(stats): with Section("Object stats", summary="allocations, frees and dict materializatons"): - total_materializations = stats.get("Object new values") - total_allocations = stats.get("Object allocations") + stats.get("Object allocations from freelist") - total_increfs = stats.get("Object interpreter increfs") + stats.get("Object increfs") - total_decrefs = stats.get("Object interpreter 
decrefs") + stats.get("Object decrefs") - rows = [] - for key, value in stats.items(): - if key.startswith("Object"): - if "materialize" in key: - ratio = f"{100*value/total_materializations:0.1f}%" - elif "allocations" in key: - ratio = f"{100*value/total_allocations:0.1f}%" - elif "increfs" in key: - ratio = f"{100*value/total_increfs:0.1f}%" - elif "decrefs" in key: - ratio = f"{100*value/total_decrefs:0.1f}%" - else: - ratio = "" - label = key[6:].strip() - label = label[0].upper() + label[1:] - rows.append((label, value, ratio)) + rows = calculate_object_stats(stats) emit_table(("", "Count:", "Ratio:"), rows) +def emit_comparative_object_stats(base_stats, head_stats): + with Section("Object stats", summary="allocations, frees and dict materializatons"): + base_rows = calculate_object_stats(base_stats) + head_rows = calculate_object_stats(head_stats) + emit_table(("", "Base Count:", "Base Ratio:", "Head Count:", "Head Ratio:"), join_rows(base_rows, head_rows)) + def get_total(opcode_stats): total = 0 for opcode_stat in opcode_stats: @@ -341,8 +522,8 @@ def emit_pair_counts(opcode_stats, total): for (count, pair) in itertools.islice(pair_counts, 100): i, j = pair cumulative += count - rows.append((opname[i] + " " + opname[j], count, f"{100*count/total:0.1f}%", - f"{100*cumulative/total:0.1f}%")) + rows.append((opname[i] + " " + opname[j], count, format_ratio(count, total), + format_ratio(cumulative, total))) emit_table(("Pair", "Count:", "Self:", "Cumulative:"), rows ) @@ -377,8 +558,7 @@ def emit_pair_counts(opcode_stats, total): succ_rows ) -def main(): - stats = gather_stats() +def output_single_stats(stats): opcode_stats = extract_opcode_stats(stats) total = get_total(opcode_stats) emit_execution_counts(opcode_stats, total) @@ -387,8 +567,79 @@ def main(): emit_specialization_overview(opcode_stats, total) emit_call_stats(stats) emit_object_stats(stats) + with Section("Meta stats", summary="Meta statistics"): + emit_table(("", "Count:"), [('Number of data files', stats['__nfiles__'])]) + + +def output_comparative_stats(base_stats, head_stats): + base_opcode_stats = extract_opcode_stats(base_stats) + base_total = get_total(base_opcode_stats) + + head_opcode_stats = extract_opcode_stats(head_stats) + head_total = get_total(head_opcode_stats) + + emit_comparative_execution_counts( + base_opcode_stats, base_total, head_opcode_stats, head_total + ) + emit_comparative_specialization_stats( + base_opcode_stats, head_opcode_stats + ) + emit_comparative_specialization_overview( + base_opcode_stats, base_total, head_opcode_stats, head_total + ) + emit_comparative_call_stats(base_stats, head_stats) + emit_comparative_object_stats(base_stats, head_stats) + +def output_stats(inputs, json_output=None): + if len(inputs) == 1: + stats = gather_stats(inputs[0]) + if json_output is not None: + json.dump(stats, json_output) + output_single_stats(stats) + elif len(inputs) == 2: + if json_output is not None: + raise ValueError( + "Can not output to JSON when there are multiple inputs" + ) + + base_stats = gather_stats(inputs[0]) + head_stats = gather_stats(inputs[1]) + output_comparative_stats(base_stats, head_stats) + print("---") print("Stats gathered on:", date.today()) +def main(): + parser = argparse.ArgumentParser(description="Summarize pystats results") + + parser.add_argument( + "inputs", + nargs="*", + type=str, + default=[DEFAULT_DIR], + help=f""" + Input source(s). 
+ For each entry, if a .json file, the output provided by --json-output from a previous run; + if a directory, a directory containing raw pystats .txt files. + If one source is provided, its stats are printed. + If two sources are provided, comparative stats are printed. + Default is {DEFAULT_DIR}. + """ + ) + + parser.add_argument( + "--json-output", + nargs="?", + type=argparse.FileType("w"), + help="Output complete raw results to the given JSON file." + ) + + args = parser.parse_args() + + if len(args.inputs) > 2: + raise ValueError("0-2 arguments may be provided.") + + output_stats(args.inputs, json_output=args.json_output) + if __name__ == "__main__": main() diff --git a/Tools/ssl/multissltests.py b/Tools/ssl/multissltests.py index d64b4f66f5192b..30d66964fd1dcf 100755 --- a/Tools/ssl/multissltests.py +++ b/Tools/ssl/multissltests.py @@ -46,8 +46,8 @@ ] OPENSSL_RECENT_VERSIONS = [ - "1.1.1q", - "3.0.5" + "1.1.1s", + "3.0.7" ] LIBRESSL_OLD_VERSIONS = [ diff --git a/Tools/wasm/README.md b/Tools/wasm/README.md index fe9a1dc99b30f4..f8dd6e675ea7f1 100644 --- a/Tools/wasm/README.md +++ b/Tools/wasm/README.md @@ -157,7 +157,7 @@ functions. - Threading is disabled by default. The ``configure`` option ``--enable-wasm-pthreads`` adds compiler flag ``-pthread`` and - linker flags ``-sUSE_PTHREADS -sPROXY_TO_PTHREAD``. + linker flags ``-sUSE_PTHREADS -sPROXY_TO_PTHREAD``. - pthread support requires WASM threads and SharedArrayBuffer (bulk memory). The Node.JS runtime keeps a pool of web workers around. Each web worker uses several file descriptors (eventfd, epoll, pipe). @@ -203,7 +203,7 @@ functions. - The interactive shell does not handle copy 'n paste and unicode support well. - The bundled stdlib is limited. Network-related modules, - distutils, multiprocessing, dbm, tests and similar modules + multiprocessing, dbm, tests and similar modules are not shipped. All other modules are bundled as pre-compiled ``pyc`` files. - In-memory file system (MEMFS) is not persistent and limited. @@ -283,7 +283,7 @@ popd ## WASI limitations and issues (WASI SDK 15.0) -A lot of Emscripten limitations also apply to WASI. Noticable restrictions +A lot of Emscripten limitations also apply to WASI. Noticeable restrictions are: - Call stack size is limited. Default recursion limit and parser stack size diff --git a/Tools/wasm/config.site-wasm32-emscripten b/Tools/wasm/config.site-wasm32-emscripten index b695a7bf8f04da..1471546a5eec17 100644 --- a/Tools/wasm/config.site-wasm32-emscripten +++ b/Tools/wasm/config.site-wasm32-emscripten @@ -14,9 +14,6 @@ ac_cv_buggy_getaddrinfo=no ac_cv_file__dev_ptmx=no ac_cv_file__dev_ptc=no -# dummy readelf, Emscripten build does not need readelf. -ac_cv_prog_ac_ct_READELF=true - # new undefined symbols / unsupported features ac_cv_func_posix_spawn=no ac_cv_func_posix_spawnp=no diff --git a/Tools/wasm/config.site-wasm32-wasi b/Tools/wasm/config.site-wasm32-wasi index 893a0d132cda59..4b8df2229915cc 100644 --- a/Tools/wasm/config.site-wasm32-wasi +++ b/Tools/wasm/config.site-wasm32-wasi @@ -9,9 +9,6 @@ ac_cv_buggy_getaddrinfo=no ac_cv_file__dev_ptmx=no ac_cv_file__dev_ptc=no -# dummy readelf, WASI build does not need readelf. 
-ac_cv_prog_ac_ct_READELF=true - # get/setrlimit are not supported ac_cv_header_sys_resource_h=no diff --git a/Tools/wasm/wasm_assets.py b/Tools/wasm/wasm_assets.py index 98a2841ce2b0e0..9dc8bda4017e2c 100755 --- a/Tools/wasm/wasm_assets.py +++ b/Tools/wasm/wasm_assets.py @@ -41,16 +41,12 @@ "ensurepip/", "venv/", # build system - "distutils/", "lib2to3/", # deprecated - "asyncore.py", - "asynchat.py", "uu.py", "xdrlib.py", # other platforms "_aix_support.py", - "_bootsubprocess.py", "_osx_support.py", # webbrowser "antigravity.py", diff --git a/Tools/wasm/wasm_build.py b/Tools/wasm/wasm_build.py index 63812c6f3153f2..493682c5b138a3 100755 --- a/Tools/wasm/wasm_build.py +++ b/Tools/wasm/wasm_build.py @@ -137,7 +137,7 @@ def read_python_version(configure: pathlib.Path = CONFIGURE) -> str: configure and configure.ac are the canonical source for major and minor version number. """ - version_re = re.compile("^PACKAGE_VERSION='(\d\.\d+)'") + version_re = re.compile(r"^PACKAGE_VERSION='(\d\.\d+)'") with configure.open(encoding="utf-8") as f: for line in f: mo = version_re.match(line) diff --git a/configure b/configure index 953c558d6048d4..3f8daf9dad5fd8 100755 --- a/configure +++ b/configure @@ -642,6 +642,8 @@ MODULE__TESTBUFFER_FALSE MODULE__TESTBUFFER_TRUE MODULE__TESTINTERNALCAPI_FALSE MODULE__TESTINTERNALCAPI_TRUE +MODULE__TESTCLINIC_FALSE +MODULE__TESTCLINIC_TRUE MODULE__TESTCAPI_FALSE MODULE__TESTCAPI_TRUE MODULE__HASHLIB_FALSE @@ -904,8 +906,6 @@ MKDIR_P INSTALL_DATA INSTALL_SCRIPT INSTALL_PROGRAM -ac_ct_READELF -READELF ARFLAGS ac_ct_AR AR @@ -3361,7 +3361,7 @@ fi -for ac_prog in python$PACKAGE_VERSION python3.10 python3.9 python3.8 python3.7 python3.6 python3 python +for ac_prog in python$PACKAGE_VERSION python3.11 python3.10 python3.9 python3.8 python3.7 python3.6 python3 python do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 @@ -6755,7 +6755,7 @@ if test "x$enable_profiling" = xyes; then CC="$CC -pg" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ -int main() { return 0; } +int main(void) { return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : @@ -7181,116 +7181,6 @@ then ARFLAGS="rcs" fi -if test -n "$ac_tool_prefix"; then - for ac_prog in readelf - do - # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -set dummy $ac_tool_prefix$ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_READELF+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$READELF"; then - ac_cv_prog_READELF="$READELF" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_READELF="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -READELF=$ac_cv_prog_READELF -if test -n "$READELF"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $READELF" >&5 -$as_echo "$READELF" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$READELF" && break - done -fi -if test -z "$READELF"; then - ac_ct_READELF=$READELF - for ac_prog in readelf -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_READELF+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_READELF"; then - ac_cv_prog_ac_ct_READELF="$ac_ct_READELF" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_ac_ct_READELF="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_READELF=$ac_cv_prog_ac_ct_READELF -if test -n "$ac_ct_READELF"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_READELF" >&5 -$as_echo "$ac_ct_READELF" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$ac_ct_READELF" && break -done - - if test "x$ac_ct_READELF" = x; then - READELF=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - READELF=$ac_ct_READELF - fi -fi - -if test "$cross_compiling" = yes; then - case "$READELF" in - readelf|:) - as_fn_error $? 
"readelf for the host is required for cross builds" "$LINENO" 5 - ;; - esac -fi - - - case $MACHDEP in hp*|HP*) # install -d does not work on HP-UX @@ -9330,7 +9220,7 @@ else void* routine(void* p){return NULL;} -int main(){ +int main(void){ pthread_t p; if(pthread_create(&p,NULL,routine,NULL)!=0) return 1; @@ -9385,7 +9275,7 @@ else void* routine(void* p){return NULL;} -int main(){ +int main(void){ pthread_t p; if(pthread_create(&p,NULL,routine,NULL)!=0) return 1; @@ -9434,7 +9324,7 @@ else void* routine(void* p){return NULL;} -int main(){ +int main(void){ pthread_t p; if(pthread_create(&p,NULL,routine,NULL)!=0) return 1; @@ -9483,7 +9373,7 @@ else void* routine(void* p){return NULL;} -int main(){ +int main(void){ pthread_t p; if(pthread_create(&p,NULL,routine,NULL)!=0) return 1; @@ -9558,7 +9448,7 @@ for ac_header in \ alloca.h asm/types.h bluetooth.h conio.h crypt.h direct.h dlfcn.h endian.h errno.h fcntl.h grp.h \ ieeefp.h io.h langinfo.h libintl.h libutil.h linux/auxvec.h sys/auxv.h linux/fs.h linux/memfd.h \ linux/random.h linux/soundcard.h \ - linux/tipc.h linux/wait.h netdb.h netinet/in.h netpacket/packet.h poll.h process.h pthread.h pty.h \ + linux/tipc.h linux/wait.h netdb.h net/ethernet.h netinet/in.h netpacket/packet.h poll.h process.h pthread.h pty.h \ sched.h setjmp.h shadow.h signal.h spawn.h stropts.h sys/audioio.h sys/bsdtty.h sys/devpoll.h \ sys/endian.h sys/epoll.h sys/event.h sys/eventfd.h sys/file.h sys/ioctl.h sys/kern_control.h \ sys/loadavg.h sys/lock.h sys/memfd.h sys/mkdev.h sys/mman.h sys/modem.h sys/param.h sys/poll.h \ @@ -12343,7 +12233,7 @@ else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ -int main() +int main(void) { char s[16]; int i, *p1, *p2; @@ -12619,7 +12509,7 @@ else LIBEXPAT_CFLAGS="-I\$(srcdir)/Modules/expat" LIBEXPAT_LDFLAGS="-lm \$(LIBEXPAT_A)" - LIBEXPAT_INTERNAL="\$(LIBEXPAT_A)" + LIBEXPAT_INTERNAL="\$(LIBEXPAT_HEADERS) \$(LIBEXPAT_A)" fi @@ -13128,7 +13018,7 @@ else LIBMPDEC_CFLAGS="-I\$(srcdir)/Modules/_decimal/libmpdec" LIBMPDEC_LDFLAGS="-lm \$(LIBMPDEC_A)" - LIBMPDEC_INTERNAL="\$(LIBMPDEC_A)" + LIBMPDEC_INTERNAL="\$(LIBMPDEC_HEADERS) \$(LIBMPDEC_A)" if test "x$with_pydebug" = xyes; then : @@ -15152,6 +15042,7 @@ $as_echo_n "checking for pthread_create in -lpthread... " >&6; } /* end confdefs.h. */ #include <stdio.h> +#include <stdlib.h> #include <pthread.h> void * start_routine (void *arg) { exit (0); } @@ -15462,7 +15353,7 @@ else void *foo(void *parm) { return NULL; } - main() { + int main(void) { pthread_attr_t attr; pthread_t id; if (pthread_attr_init(&attr)) return (-1); @@ -17064,7 +16955,7 @@ else #include <sys/stat.h> #include <unistd.h> -int main(int argc, char*argv[]) +int main(int argc, char *argv[]) { if(chflags(argv[0], 0) != 0) return 1; @@ -17113,7 +17004,7 @@ else #include <sys/stat.h> #include <unistd.h> -int main(int argc, char*argv[]) +int main(int argc, char *argv[]) { if(lchflags(argv[0], 0) != 0) return 1; @@ -19821,7 +19712,7 @@ else #include <sys/socket.h> #include <netinet/in.h> -int main() +int main(void) { int passive, gaierr, inet4 = 0, inet6 = 0; struct addrinfo hints, *ai, *aitop; @@ -21018,7 +20909,7 @@ else #include <stdlib.h> #include <math.h> -int main() { +int main(void) { volatile double x, y, z; /* 1./(1-2**-53) -> 1+2**-52 (correct), 1.0 (double rounding) */ x = 0.99999999999999989; /* 1-2**-53 */ @@ -21797,7 +21688,7 @@ else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ -int main() +int main(void) { return (((-1)>>3 == -1) ? 
0 : 1);
 }
@@ -22699,7 +22590,7 @@ else
 #include <stdlib.h>
 #include <unistd.h>
-int main()
+int main(void)
 {
 	int val1 = nice(1);
 	if (val1 != -1 && val1 == nice(2))
@@ -22741,7 +22632,7 @@ else
 #include <poll.h>
 #include <unistd.h>
-int main()
+int main(void)
 {
     struct pollfd poll_struct = { 42, POLLIN|POLLPRI|POLLOUT, 0 };
     int poll_test;
@@ -22798,7 +22689,7 @@ else
 extern char *tzname[];
 #endif
-int main()
+int main(void)
 {
 	/* Note that we need to ensure that not only does tzset(3)
 	   do 'something' with localtime, but it works as documented
@@ -24460,9 +24351,10 @@ else
 cat confdefs.h - <<_ACEOF >conftest.$ac_ext
 /* end confdefs.h. */
+#include <stddef.h>
 #include <stdio.h>
-#include<stdlib.h>
-int main() {
+#include <stdlib.h>
+int main(void) {
     size_t len = -1;
     const char *str = "text";
     len = mbstowcs(NULL, str, 0);
@@ -24662,7 +24554,7 @@ else
 #include <stdlib.h>
 #include <string.h>
 void foo(void *p, void *q) { memmove(p, q, 19); }
-int main() {
+int main(void) {
     char a[32] = "123456789000000000";
     foo(&a[9], a);
     if (strcmp(a, "123456789123456789000000000") != 0)
@@ -24717,7 +24609,7 @@ else
     );
     return r;
   }
-  int main() {
+  int main(void) {
     int p = 8;
     if ((foo(&p) ? : p) != 6)
       return 1;
@@ -24760,7 +24652,7 @@ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
    #include <stdatomic.h>
    atomic_int int_var;
    atomic_uintptr_t uintptr_var;
-   int main() {
+   int main(void) {
      atomic_store_explicit(&int_var, 5, memory_order_relaxed);
      atomic_store_explicit(&uintptr_var, 0, memory_order_relaxed);
      int loaded_value = atomic_load_explicit(&int_var, memory_order_seq_cst);
@@ -24801,7 +24693,7 @@ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
    int val;
-   int main() {
+   int main(void) {
      __atomic_store_n(&val, 1, __ATOMIC_SEQ_CST);
      (void)__atomic_load_n(&val, __ATOMIC_SEQ_CST);
      return 0;
@@ -24877,7 +24769,7 @@ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
    #include <dirent.h>
-   int main() {
+   int main(void) {
      struct dirent entry;
      return entry.d_type == DT_UNKNOWN;
    }
@@ -24915,11 +24807,12 @@ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
 /* end confdefs.h. */
+   #include <stddef.h>
    #include <unistd.h>
    #include <sys/syscall.h>
    #include <linux/random.h>
-   int main() {
+   int main(void) {
      char buffer[1];
      const size_t buflen = sizeof(buffer);
      const int flags = GRND_NONBLOCK;
@@ -24962,9 +24855,10 @@ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
 /* end confdefs.h. */
+   #include <stddef.h>
    #include <sys/random.h>
-   int main() {
+   int main(void) {
      char buffer[1];
      const size_t buflen = sizeof(buffer);
      const int flags = 0;
@@ -27859,6 +27753,40 @@ fi
 $as_echo "$py_cv_module__testcapi" >&6; }
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for stdlib extension module _testclinic" >&5
+$as_echo_n "checking for stdlib extension module _testclinic... " >&6; }
+  if test "$py_cv_module__testclinic" != "n/a"; then :
+
+  if test "$TEST_MODULES" = yes; then :
+  if true; then :
+    py_cv_module__testclinic=yes
+else
+    py_cv_module__testclinic=missing
+fi
+else
+  py_cv_module__testclinic=disabled
+fi
+
+fi
+  as_fn_append MODULE_BLOCK "MODULE__TESTCLINIC_STATE=$py_cv_module__testclinic$as_nl"
+  if test "x$py_cv_module__testclinic" = xyes; then :
+
+
+
+
+fi
+  if test "$py_cv_module__testclinic" = yes; then
+  MODULE__TESTCLINIC_TRUE=
+  MODULE__TESTCLINIC_FALSE='#'
+else
+  MODULE__TESTCLINIC_TRUE='#'
+  MODULE__TESTCLINIC_FALSE=
+fi
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $py_cv_module__testclinic" >&5
+$as_echo "$py_cv_module__testclinic" >&6; }
+
+
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for stdlib extension module _testinternalcapi" >&5
 $as_echo_n "checking for stdlib extension module _testinternalcapi... " >&6; }
  if test "$py_cv_module__testinternalcapi" != "n/a"; then :
@@ -28578,6 +28506,10 @@ if test -z "${MODULE__TESTCAPI_TRUE}" && test -z "${MODULE__TESTCAPI_FALSE}"; th
   as_fn_error $? "conditional \"MODULE__TESTCAPI\" was never defined.
 Usually this means the macro was only invoked conditionally." "$LINENO" 5
 fi
+if test -z "${MODULE__TESTCLINIC_TRUE}" && test -z "${MODULE__TESTCLINIC_FALSE}"; then
+  as_fn_error $? "conditional \"MODULE__TESTCLINIC\" was never defined.
+Usually this means the macro was only invoked conditionally." "$LINENO" 5
+fi
 if test -z "${MODULE__TESTINTERNALCAPI_TRUE}" && test -z "${MODULE__TESTINTERNALCAPI_FALSE}"; then
   as_fn_error $? "conditional \"MODULE__TESTINTERNALCAPI\" was never defined.
 Usually this means the macro was only invoked conditionally." "$LINENO" 5
 fi
diff --git a/configure.ac b/configure.ac
index 210ce3292cfdf7..734a4db8389915 100644
--- a/configure.ac
+++ b/configure.ac
@@ -203,7 +203,7 @@ AC_SUBST([FREEZE_MODULE_DEPS])
 AC_SUBST([PYTHON_FOR_BUILD_DEPS])
 AC_CHECK_PROGS([PYTHON_FOR_REGEN],
-  [python$PACKAGE_VERSION python3.10 python3.9 python3.8 python3.7 python3.6 python3 python],
+  [python$PACKAGE_VERSION python3.11 python3.10 python3.9 python3.8 python3.7 python3.6 python3 python],
   [python3])
 AC_SUBST(PYTHON_FOR_REGEN)
@@ -1427,7 +1427,7 @@ AC_ARG_ENABLE(profiling,
 if test "x$enable_profiling" = xyes; then
   ac_save_cc="$CC"
   CC="$CC -pg"
-  AC_LINK_IFELSE([AC_LANG_SOURCE([[int main() { return 0; }]])],
+  AC_LINK_IFELSE([AC_LANG_SOURCE([[int main(void) { return 0; }]])],
   [],
   [enable_profiling=no])
   CC="$ac_save_cc"
@@ -1617,17 +1617,6 @@ then
     ARFLAGS="rcs"
 fi
-AC_CHECK_TOOLS([READELF], [readelf], [:])
-if test "$cross_compiling" = yes; then
-    case "$READELF" in
-    readelf|:)
-        AC_MSG_ERROR([readelf for the host is required for cross builds])
-        ;;
-    esac
-fi
-AC_SUBST(READELF)
-
-
 case $MACHDEP in
 hp*|HP*)
     # install -d does not work on HP-UX
@@ -2564,7 +2553,7 @@ AC_CACHE_CHECK([whether pthreads are available without options],
 void* routine(void* p){return NULL;}
-int main(){
+int main(void){
   pthread_t p;
   if(pthread_create(&p,NULL,routine,NULL)!=0)
     return 1;
@@ -2597,7 +2586,7 @@ AC_RUN_IFELSE([AC_LANG_SOURCE([[
 void* routine(void* p){return NULL;}
-int main(){
+int main(void){
   pthread_t p;
   if(pthread_create(&p,NULL,routine,NULL)!=0)
     return 1;
@@ -2624,7 +2613,7 @@ AC_RUN_IFELSE([AC_LANG_SOURCE([[
 void* routine(void* p){return NULL;}
-int main(){
+int main(void){
   pthread_t p;
   if(pthread_create(&p,NULL,routine,NULL)!=0)
     return 1;
@@ -2651,7 +2640,7 @@ AC_RUN_IFELSE([AC_LANG_SOURCE([[
 void* routine(void* p){return NULL;}
-int main(){
+int main(void){
   pthread_t p;
   if(pthread_create(&p,NULL,routine,NULL)!=0)
     return 1;
@@ -2719,7 +2708,7 @@ AC_CHECK_HEADERS([ \
  alloca.h asm/types.h bluetooth.h conio.h crypt.h direct.h dlfcn.h endian.h errno.h fcntl.h grp.h \
  ieeefp.h io.h langinfo.h libintl.h libutil.h linux/auxvec.h sys/auxv.h linux/fs.h linux/memfd.h \
  linux/random.h linux/soundcard.h \
- linux/tipc.h linux/wait.h netdb.h netinet/in.h netpacket/packet.h poll.h process.h pthread.h pty.h \
+ linux/tipc.h linux/wait.h netdb.h net/ethernet.h netinet/in.h netpacket/packet.h poll.h process.h pthread.h pty.h \
  sched.h setjmp.h shadow.h signal.h spawn.h stropts.h sys/audioio.h sys/bsdtty.h sys/devpoll.h \
  sys/endian.h sys/epoll.h sys/event.h sys/eventfd.h sys/file.h sys/ioctl.h sys/kern_control.h \
  sys/loadavg.h sys/lock.h sys/memfd.h sys/mkdev.h sys/mman.h sys/modem.h sys/param.h sys/poll.h \
@@ -3589,7 +3578,7 @@ esac
 # check for systems that require aligned memory access
 AC_CACHE_CHECK([aligned memory access is required], [ac_cv_aligned_required],
   [AC_RUN_IFELSE([AC_LANG_SOURCE([[
-int main()
+int main(void)
 {
     char s[16];
     int i, *p1, *p2;
@@ -3710,7 +3699,7 @@ AS_VAR_IF([with_system_expat], [yes], [
 ], [
   LIBEXPAT_CFLAGS="-I\$(srcdir)/Modules/expat"
   LIBEXPAT_LDFLAGS="-lm \$(LIBEXPAT_A)"
-  LIBEXPAT_INTERNAL="\$(LIBEXPAT_A)"
+  LIBEXPAT_INTERNAL="\$(LIBEXPAT_HEADERS) \$(LIBEXPAT_A)"
 ])
 AC_SUBST([LIBEXPAT_CFLAGS])
@@ -3819,7 +3808,7 @@ AS_VAR_IF([with_system_libmpdec], [yes], [
 ], [
   LIBMPDEC_CFLAGS="-I\$(srcdir)/Modules/_decimal/libmpdec"
   LIBMPDEC_LDFLAGS="-lm \$(LIBMPDEC_A)"
-  LIBMPDEC_INTERNAL="\$(LIBMPDEC_A)"
+  LIBMPDEC_INTERNAL="\$(LIBMPDEC_HEADERS) \$(LIBMPDEC_A)"
   dnl Disable forced inlining in debug builds, see GH-94847
   AS_VAR_IF([with_pydebug], [yes], [
@@ -4303,6 +4292,7 @@ yes
 AC_MSG_CHECKING([for pthread_create in -lpthread])
 AC_LINK_IFELSE([AC_LANG_PROGRAM([[
 #include <stdio.h>
+#include <stdlib.h>
 #include <pthread.h>
 void * start_routine (void *arg) { exit (0); }]], [[
@@ -4372,7 +4362,7 @@ if test "$posix_threads" = "yes"; then
       void *foo(void *parm) {
         return NULL;
       }
-      main() {
+      int main(void) {
         pthread_attr_t attr;
         pthread_t id;
         if (pthread_attr_init(&attr)) return (-1);
@@ -4909,7 +4899,7 @@ AC_CACHE_CHECK([for chflags], [ac_cv_have_chflags], [dnl
 AC_RUN_IFELSE([AC_LANG_SOURCE([[
 #include <sys/stat.h>
 #include <unistd.h>
-int main(int argc, char*argv[])
+int main(int argc, char *argv[])
 {
   if(chflags(argv[0], 0) != 0)
     return 1;
@@ -4931,7 +4921,7 @@ AC_CACHE_CHECK([for lchflags], [ac_cv_have_lchflags], [dnl
 AC_RUN_IFELSE([AC_LANG_SOURCE([[
 #include <sys/stat.h>
 #include <unistd.h>
-int main(int argc, char*argv[])
+int main(int argc, char *argv[])
 {
   if(lchflags(argv[0], 0) != 0)
     return 1;
@@ -5207,7 +5197,7 @@ AS_VAR_IF([ac_cv_func_getaddrinfo], [yes], [
 #include <sys/socket.h>
 #include <netinet/in.h>
-int main()
+int main(void)
 {
   int passive, gaierr, inet4 = 0, inet6 = 0;
   struct addrinfo hints, *ai, *aitop;
@@ -5623,7 +5613,7 @@ CC="$CC $BASECFLAGS"
 AC_RUN_IFELSE([AC_LANG_SOURCE([[
 #include <stdlib.h>
 #include <math.h>
-int main() {
+int main(void) {
     volatile double x, y, z;
     /* 1./(1-2**-53) -> 1+2**-52 (correct), 1.0 (double rounding) */
     x = 0.99999999999999989; /* 1-2**-53 */
@@ -5930,7 +5920,7 @@ fi],
 # or fills with zeros (like the Cray J90, according to Tim Peters).
 AC_CACHE_CHECK([whether right shift extends the sign bit], [ac_cv_rshift_extends_sign], [
 AC_RUN_IFELSE([AC_LANG_SOURCE([[
-int main()
+int main(void)
 {
     return (((-1)>>3 == -1) ? 0 : 1);
 }
@@ -6129,7 +6119,7 @@ AC_CACHE_CHECK([for broken nice()], [ac_cv_broken_nice], [
 AC_RUN_IFELSE([AC_LANG_SOURCE([[
 #include <stdlib.h>
 #include <unistd.h>
-int main()
+int main(void)
 {
 	int val1 = nice(1);
 	if (val1 != -1 && val1 == nice(2))
@@ -6151,7 +6141,7 @@ AC_RUN_IFELSE([AC_LANG_SOURCE([[
 #include <poll.h>
 #include <unistd.h>
-int main()
+int main(void)
 {
     struct pollfd poll_struct = { 42, POLLIN|POLLPRI|POLLOUT, 0 };
     int poll_test;
@@ -6187,7 +6177,7 @@ AC_RUN_IFELSE([AC_LANG_SOURCE([[
 extern char *tzname[];
 #endif
-int main()
+int main(void)
 {
 	/* Note that we need to ensure that not only does tzset(3)
 	   do 'something' with localtime, but it works as documented
@@ -6528,9 +6518,10 @@ AC_CHECK_TYPE(socklen_t,,
 AC_CACHE_CHECK([for broken mbstowcs], [ac_cv_broken_mbstowcs],
 AC_RUN_IFELSE([AC_LANG_SOURCE([[
+#include <stddef.h>
 #include <stdio.h>
-#include<stdlib.h>
-int main() {
+#include <stdlib.h>
+int main(void) {
     size_t len = -1;
     const char *str = "text";
     len = mbstowcs(NULL, str, 0);
@@ -6657,7 +6648,7 @@ AC_RUN_IFELSE([AC_LANG_SOURCE([[
 #include <stdlib.h>
 #include <string.h>
 void foo(void *p, void *q) { memmove(p, q, 19); }
-int main() {
+int main(void) {
     char a[32] = "123456789000000000";
     foo(&a[9], a);
     if (strcmp(a, "123456789123456789000000000") != 0)
@@ -6698,7 +6689,7 @@ if test "$ac_cv_gcc_asm_for_x87" = yes; then
     );
     return r;
   }
-  int main() {
+  int main(void) {
     int p = 8;
     if ((foo(&p) ? : p) != 6)
       return 1;
@@ -6726,7 +6717,7 @@ AC_LINK_IFELSE(
    #include <stdatomic.h>
    atomic_int int_var;
    atomic_uintptr_t uintptr_var;
-   int main() {
+   int main(void) {
      atomic_store_explicit(&int_var, 5, memory_order_relaxed);
      atomic_store_explicit(&uintptr_var, 0, memory_order_relaxed);
      int loaded_value = atomic_load_explicit(&int_var, memory_order_seq_cst);
@@ -6747,7 +6738,7 @@ AC_LINK_IFELSE(
  [ AC_LANG_SOURCE([[
    int val;
-   int main() {
+   int main(void) {
      __atomic_store_n(&val, 1, __ATOMIC_SEQ_CST);
      (void)__atomic_load_n(&val, __ATOMIC_SEQ_CST);
      return 0;
@@ -6788,7 +6779,7 @@ AC_LINK_IFELSE(
  AC_LANG_SOURCE([[
    #include <dirent.h>
-   int main() {
+   int main(void) {
      struct dirent entry;
      return entry.d_type == DT_UNKNOWN;
    }
@@ -6806,11 +6797,12 @@ AC_CACHE_CHECK([for the Linux getrandom() syscall], [ac_cv_getrandom_syscall], [
  AC_LINK_IFELSE(
  [ AC_LANG_SOURCE([[
+   #include <stddef.h>
    #include <unistd.h>
    #include <sys/syscall.h>
    #include <linux/random.h>
-   int main() {
+   int main(void) {
      char buffer[1];
      const size_t buflen = sizeof(buffer);
      const int flags = GRND_NONBLOCK;
@@ -6833,9 +6825,10 @@ AC_CACHE_CHECK([for the getrandom() function], [ac_cv_func_getrandom], [
  AC_LINK_IFELSE(
  [ AC_LANG_SOURCE([[
+   #include <stddef.h>
    #include <sys/random.h>
-   int main() {
+   int main(void) {
      char buffer[1];
      const size_t buflen = sizeof(buffer);
      const int flags = 0;
@@ -7361,6 +7354,7 @@ PY_STDLIB_MOD([_hashlib], [], [test "$ac_cv_working_openssl_hashlib" = yes],
 dnl test modules
 PY_STDLIB_MOD([_testcapi], [test "$TEST_MODULES" = yes])
+PY_STDLIB_MOD([_testclinic], [test "$TEST_MODULES" = yes])
 PY_STDLIB_MOD([_testinternalcapi], [test "$TEST_MODULES" = yes])
 PY_STDLIB_MOD([_testbuffer], [test "$TEST_MODULES" = yes])
 PY_STDLIB_MOD([_testimportmultiple], [test "$TEST_MODULES" = yes], [test "$ac_cv_func_dlopen" = yes])
diff --git a/netlify.toml b/netlify.toml
new file mode 100644
index 00000000000000..f5790fc5fec74f
--- /dev/null
+++ b/netlify.toml
@@ -0,0 +1,11 @@
+[build]
+  base = "Doc/"
+  command = "make html"
+  publish = "build/html"
+  # Do not trigger netlify builds if docs were not changed.
+  # Changed files should be in sync with `.github/workflows/doc.yml`
+  ignore = "git diff --quiet $CACHED_COMMIT_REF $COMMIT_REF . ../netlify.toml"
+
+[build.environment]
+  PYTHON_VERSION = "3.8"
+  IS_DEPLOYMENT_PREVIEW = "true"
diff --git a/pyconfig.h.in b/pyconfig.h.in
index 0d3c851a1af9a3..236cee6588d49b 100644
--- a/pyconfig.h.in
+++ b/pyconfig.h.in
@@ -844,6 +844,9 @@
 /* Define to 1 if you have the <netpacket/packet.h> header file. */
 #undef HAVE_NETPACKET_PACKET_H
+/* Define to 1 if you have the <net/ethernet.h> header file. */
+#undef HAVE_NET_ETHERNET_H
+
 /* Define to 1 if you have the <net/if.h> header file. */
 #undef HAVE_NET_IF_H