diff -Nru python3.10-3.10.11/debian/changelog python3.10-3.10.12/debian/changelog --- python3.10-3.10.11/debian/changelog 2023-05-15 10:31:46.000000000 +0000 +++ python3.10-3.10.12/debian/changelog 2023-06-09 08:30:31.000000000 +0000 @@ -1,11 +1,17 @@ -python3.10 (3.10.11-1~22.10) kinetic-proposed; urgency=medium +python3.10 (3.10.12-1~22.10) kinetic-proposed; urgency=medium - * SRU: LP: #1995504: Backport 3.10.11 to Ubuntu 22.10. + * SRU: LP: #1995504: Backport the 3.10.12 release to 22.10. * Revert: - autopkgtests: Support python3-setuptools >= 64, which does PEP-660 editable installs. - -- Matthias Klose Mon, 15 May 2023 12:31:46 +0200 + -- Matthias Klose Fri, 09 Jun 2023 10:30:31 +0200 + +python3.10 (3.10.12-1) unstable; urgency=medium + + * Python 3.10.12 release. + + -- Matthias Klose Wed, 07 Jun 2023 12:09:46 +0200 python3.10 (3.10.11-1) unstable; urgency=medium @@ -69,39 +75,6 @@ -- Matthias Klose Sat, 01 Oct 2022 06:31:04 +0200 -python3.10 (3.10.7-1ubuntu0.3) kinetic-security; urgency=medium - - * SECURITY UPDATE: Possible Bypass Blocklisting - - debian/patches/CVE-2023-24329.patch: enforce - that a scheme must begin with an alphabetical ASCII character - in Lib/urllib/parse.py, Lib/test/test_urlparse.py. - - CVE-2023-24329 - - -- Leonidas Da Silva Barbosa Fri, 10 Mar 2023 07:47:39 -0300 - -python3.10 (3.10.7-1ubuntu0.2) kinetic-security; urgency=medium - - * SECURITY UPDATE: Buffer overflow - - debian/patches/CVE-2022-37454.patch: fixes buffer overflow in - Modules/_sha3/kcp/KeccakSponge.inc (LP: #1995197). - - CVE-2022-37454 - * SECURITY UPDATE: Denial of service - - debian/patches/CVE-2022-45061.patch: fix quadratic time idna decoding - in Lib/encodings/idna.py, Lib/test/test_codecs.py. - - CVE-2022-45061 - - -- Leonidas Da Silva Barbosa Thu, 24 Nov 2022 16:45:47 -0300 - -python3.10 (3.10.7-1ubuntu0.1) kinetic-security; urgency=medium - - * SECURITY UPDATE: privilege escalation via multiprocessing forkserver - start method - - debian/patches/CVE-2022-42919.patch: don't use Linux abstract sockets - in Lib/multiprocessing/connection.py. - - CVE-2022-42919 - - -- Marc Deslauriers Wed, 02 Nov 2022 14:49:29 -0400 - python3.10 (3.10.7-1) unstable; urgency=medium * Python 3.10.7 release. diff -Nru python3.10-3.10.11/debian/patches/CVE-2023-24329.patch python3.10-3.10.12/debian/patches/CVE-2023-24329.patch --- python3.10-3.10.11/debian/patches/CVE-2023-24329.patch 2023-03-10 10:47:32.000000000 +0000 +++ python3.10-3.10.12/debian/patches/CVE-2023-24329.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,73 +0,0 @@ -From 72d356e3584ebfb8e813a8e9f2cd3dccf233c0d9 Mon Sep 17 00:00:00 2001 -From: "Miss Islington (bot)" - <31488909+miss-islington@users.noreply.github.com> -Date: Sun, 13 Nov 2022 11:00:25 -0800 -Subject: [PATCH] gh-99418: Make urllib.parse.urlparse enforce that a scheme - must begin with an alphabetical ASCII character. (GH-99421) - -Prevent urllib.parse.urlparse from accepting schemes that don't begin with an alphabetical ASCII character. - -RFC 3986 defines a scheme like this: `scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." 
)` -RFC 2234 defines an ALPHA like this: `ALPHA = %x41-5A / %x61-7A` - -The WHATWG URL spec defines a scheme like this: -`"A URL-scheme string must be one ASCII alpha, followed by zero or more of ASCII alphanumeric, U+002B (+), U+002D (-), and U+002E (.)."` -(cherry picked from commit 439b9cfaf43080e91c4ad69f312f21fa098befc7) - -Co-authored-by: Ben Kallus <49924171+kenballus@users.noreply.github.com> ---- - Lib/test/test_urlparse.py | 18 ++++++++++++++++++ - Lib/urllib/parse.py | 2 +- - ...22-11-12-15-45-51.gh-issue-99418.FxfAXS.rst | 2 ++ - 3 files changed, 21 insertions(+), 1 deletion(-) - create mode 100644 Misc/NEWS.d/next/Library/2022-11-12-15-45-51.gh-issue-99418.FxfAXS.rst - -Index: python3.10-3.10.7/Lib/test/test_urlparse.py -=================================================================== ---- python3.10-3.10.7.orig/Lib/test/test_urlparse.py -+++ python3.10-3.10.7/Lib/test/test_urlparse.py -@@ -665,6 +665,24 @@ class UrlParseTestCase(unittest.TestCase - with self.assertRaises(ValueError): - p.port - -+ def test_attributes_bad_scheme(self): -+ """Check handling of invalid schemes.""" -+ for bytes in (False, True): -+ for parse in (urllib.parse.urlsplit, urllib.parse.urlparse): -+ for scheme in (".", "+", "-", "0", "http&", "६http"): -+ with self.subTest(bytes=bytes, parse=parse, scheme=scheme): -+ url = scheme + "://www.example.net" -+ if bytes: -+ if url.isascii(): -+ url = url.encode("ascii") -+ else: -+ continue -+ p = parse(url) -+ if bytes: -+ self.assertEqual(p.scheme, b"") -+ else: -+ self.assertEqual(p.scheme, "") -+ - def test_attributes_without_netloc(self): - # This example is straight from RFC 3261. It looks like it - # should allow the username, hostname, and port to be filled -Index: python3.10-3.10.7/Lib/urllib/parse.py -=================================================================== ---- python3.10-3.10.7.orig/Lib/urllib/parse.py -+++ python3.10-3.10.7/Lib/urllib/parse.py -@@ -470,7 +470,7 @@ def urlsplit(url, scheme='', allow_fragm - clear_cache() - netloc = query = fragment = '' - i = url.find(':') -- if i > 0: -+ if i > 0 and url[0].isascii() and url[0].isalpha(): - for c in url[:i]: - if c not in scheme_chars: - break -Index: python3.10-3.10.7/Misc/NEWS.d/next/Library/2022-11-12-15-45-51.gh-issue-99418.FxfAXS.rst -=================================================================== ---- /dev/null -+++ python3.10-3.10.7/Misc/NEWS.d/next/Library/2022-11-12-15-45-51.gh-issue-99418.FxfAXS.rst -@@ -0,0 +1,2 @@ -+Fix bug in :func:`urllib.parse.urlparse` that causes URL schemes that begin -+with a digit, a plus sign, or a minus sign to be parsed incorrectly. diff -Nru python3.10-3.10.11/debian/patches/series python3.10-3.10.12/debian/patches/series --- python3.10-3.10.11/debian/patches/series 2023-05-15 10:31:46.000000000 +0000 +++ python3.10-3.10.12/debian/patches/series 2022-12-07 13:52:45.000000000 +0000 @@ -40,4 +40,3 @@ reproducible-pyc.diff fix-ia64.diff gh-78214.diff -CVE-2023-24329.patch diff -Nru python3.10-3.10.11/Doc/c-api/marshal.rst python3.10-3.10.12/Doc/c-api/marshal.rst --- python3.10-3.10.11/Doc/c-api/marshal.rst 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Doc/c-api/marshal.rst 2023-06-06 22:30:33.000000000 +0000 @@ -25,12 +25,16 @@ the least-significant 32 bits of *value*; regardless of the size of the native :c:expr:`long` type. *version* indicates the file format. + This function can fail, in which case it sets the error indicator. + Use :c:func:`PyErr_Occurred` to check for that. .. 
c:function:: void PyMarshal_WriteObjectToFile(PyObject *value, FILE *file, int version) Marshal a Python object, *value*, to *file*. *version* indicates the file format. + This function can fail, in which case it sets the error indicator. + Use :c:func:`PyErr_Occurred` to check for that. .. c:function:: PyObject* PyMarshal_WriteObjectToString(PyObject *value, int version) diff -Nru python3.10-3.10.11/Doc/library/functions.rst python3.10-3.10.12/Doc/library/functions.rst --- python3.10-3.10.11/Doc/library/functions.rst 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Doc/library/functions.rst 2023-06-06 22:30:33.000000000 +0000 @@ -1423,8 +1423,9 @@ arguments are converted to text strings, :func:`print` cannot be used with binary mode file objects. For these, use ``file.write(...)`` instead. - Whether the output is buffered is usually determined by *file*, but if the - *flush* keyword argument is true, the stream is forcibly flushed. + Output buffering is usually determined by *file*. + However, if *flush* is true, the stream is forcibly flushed. + .. versionchanged:: 3.3 Added the *flush* keyword argument. diff -Nru python3.10-3.10.11/Doc/library/shutil.rst python3.10-3.10.12/Doc/library/shutil.rst --- python3.10-3.10.11/Doc/library/shutil.rst 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Doc/library/shutil.rst 2023-06-06 22:30:33.000000000 +0000 @@ -620,7 +620,7 @@ Remove the archive format *name* from the list of supported formats. -.. function:: unpack_archive(filename[, extract_dir[, format]]) +.. function:: unpack_archive(filename[, extract_dir[, format[, filter]]]) Unpack an archive. *filename* is the full path of the archive. @@ -634,6 +634,15 @@ registered for that extension. In case none is found, a :exc:`ValueError` is raised. + The keyword-only *filter* argument, which was added in Python 3.10.12, + is passed to the underlying unpacking function. + For zip files, *filter* is not accepted. + For tar files, it is recommended to set it to ``'data'``, + unless using features specific to tar and UNIX-like filesystems. + (See :ref:`tarfile-extraction-filter` for details.) + The ``'data'`` filter will become the default for tar files + in Python 3.14. + .. audit-event:: shutil.unpack_archive filename,extract_dir,format shutil.unpack_archive .. warning:: @@ -646,6 +655,9 @@ .. versionchanged:: 3.7 Accepts a :term:`path-like object` for *filename* and *extract_dir*. + .. versionchanged:: 3.10.12 + Added the *filter* argument. + .. function:: register_unpack_format(name, extensions, function[, extra_args[, description]]) Registers an unpack format. *name* is the name of the format and @@ -653,11 +665,14 @@ ``.zip`` for Zip files. *function* is the callable that will be used to unpack archives. The - callable will receive the path of the archive, followed by the directory - the archive must be extracted to. + callable will receive: - When provided, *extra_args* is a sequence of ``(name, value)`` tuples that - will be passed as keywords arguments to the callable. + - the path of the archive, as a positional argument; + - the directory the archive must be extracted to, as a positional argument; + - possibly a *filter* keyword argument, if it was given to + :func:`unpack_archive`; + - additional keyword arguments, specified by *extra_args* as a sequence + of ``(name, value)`` tuples. *description* can be provided to describe the format, and will be returned by the :func:`get_unpack_formats` function. 
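A minimal usage sketch of the keyword-only *filter* argument documented in the :mod:`shutil` hunk above, assuming a Python build that carries this 3.10.12 backport; the archive paths are hypothetical placeholders::

    import shutil

    # Tar-based formats: pass filter='data' unless tar/UNIX-specific
    # features are needed (see the tarfile extraction-filter docs).
    shutil.unpack_archive("project-1.0.tar.gz", "dest/", filter="data")

    # Zip archives do not accept a filter; omit the argument for them.
    shutil.unpack_archive("project-1.0.zip", "dest/")

On older interpreters without the backport, passing *filter* raises :exc:`TypeError`, so version-sensitive code should feature-test first (see the ``hasattr(tarfile, 'data_filter')`` guidance later in this diff).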
diff -Nru python3.10-3.10.11/Doc/library/socket.rst python3.10-3.10.12/Doc/library/socket.rst --- python3.10-3.10.11/Doc/library/socket.rst 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Doc/library/socket.rst 2023-06-06 22:30:33.000000000 +0000 @@ -1596,7 +1596,7 @@ much data, if any, was successfully sent. .. versionchanged:: 3.5 - The socket timeout is no more reset each time data is sent successfully. + The socket timeout is no longer reset each time data is sent successfully. The socket timeout is now the maximum total duration to send all data. .. versionchanged:: 3.5 @@ -1814,8 +1814,8 @@ * In *non-blocking mode*, operations fail (with an error that is unfortunately system-dependent) if they cannot be completed immediately: functions from the - :mod:`select` can be used to know when and whether a socket is available for - reading or writing. + :mod:`select` module can be used to know when and whether a socket is available + for reading or writing. * In *timeout mode*, operations fail if they cannot be completed within the timeout specified for the socket (they raise a :exc:`timeout` exception) @@ -2004,7 +2004,7 @@ socket.socket(socket.AF_CAN, socket.SOCK_DGRAM, socket.CAN_BCM) After binding (:const:`CAN_RAW`) or connecting (:const:`CAN_BCM`) the socket, you -can use the :meth:`socket.send`, and the :meth:`socket.recv` operations (and +can use the :meth:`socket.send` and :meth:`socket.recv` operations (and their counterparts) on the socket object as usual. This last example might require special privileges:: diff -Nru python3.10-3.10.11/Doc/library/sys.rst python3.10-3.10.12/Doc/library/sys.rst --- python3.10-3.10.11/Doc/library/sys.rst 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Doc/library/sys.rst 2023-06-06 22:30:33.000000000 +0000 @@ -651,7 +651,7 @@ the encoding used with the :term:`filesystem error handler ` to convert between Unicode filenames and bytes filenames. The filesystem error handler is returned from - :func:`getfilesystemencoding`. + :func:`getfilesystemencodeerrors`. For best compatibility, str should be used for filenames in all cases, although representing filenames as bytes is also supported. Functions diff -Nru python3.10-3.10.11/Doc/library/tarfile.rst python3.10-3.10.12/Doc/library/tarfile.rst --- python3.10-3.10.11/Doc/library/tarfile.rst 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Doc/library/tarfile.rst 2023-06-06 22:30:33.000000000 +0000 @@ -206,6 +206,38 @@ Is raised by :meth:`TarInfo.frombuf` if the buffer it gets is invalid. +.. exception:: FilterError + + Base class for members :ref:`refused ` by + filters. + + .. attribute:: tarinfo + + Information about the member that the filter refused to extract, + as :ref:`TarInfo `. + +.. exception:: AbsolutePathError + + Raised to refuse extracting a member with an absolute path. + +.. exception:: OutsideDestinationError + + Raised to refuse extracting a member outside the destination directory. + +.. exception:: SpecialFileError + + Raised to refuse extracting a special file (e.g. a device or pipe). + +.. exception:: AbsoluteLinkError + + Raised to refuse extracting a symbolic link with an absolute path. + +.. exception:: LinkOutsideDestinationError + + Raised to refuse extracting a symbolic link pointing outside the destination + directory. + + The following constants are available at the module level: .. data:: ENCODING @@ -316,11 +348,8 @@ *debug* can be set from ``0`` (no debug messages) up to ``3`` (all debug messages). The messages are written to ``sys.stderr``. 
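A short sketch of how the new :exc:`FilterError` hierarchy documented above surfaces during extraction, assuming the 3.10.12 backport and a hypothetical archive name; with the default ``errorlevel=1``, a member refused by the ``'data'`` filter is reported through a :exc:`FilterError` subclass that carries the offending member in its ``tarinfo`` attribute::

    import tarfile

    with tarfile.open("untrusted.tar") as tf:   # hypothetical archive
        try:
            tf.extractall(path="dest", filter="data")
        except tarfile.FilterError as exc:
            # e.g. AbsolutePathError, OutsideDestinationError,
            # SpecialFileError, LinkOutsideDestinationError
            print("refused member:", exc.tarinfo.name)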
- If *errorlevel* is ``0``, all errors are ignored when using :meth:`TarFile.extract`. - Nevertheless, they appear as error messages in the debug output, when debugging - is enabled. If ``1``, all *fatal* errors are raised as :exc:`OSError` - exceptions. If ``2``, all *non-fatal* errors are raised as :exc:`TarError` - exceptions as well. + *errorlevel* controls how extraction errors are handled, + see :attr:`the corresponding attribute <~TarFile.errorlevel>`. The *encoding* and *errors* arguments define the character encoding to be used for reading or writing the archive and how conversion errors are going @@ -387,7 +416,7 @@ available. -.. method:: TarFile.extractall(path=".", members=None, *, numeric_owner=False) +.. method:: TarFile.extractall(path=".", members=None, *, numeric_owner=False, filter=None) Extract all members from the archive to the current working directory or directory *path*. If optional *members* is given, it must be a subset of the @@ -401,6 +430,12 @@ are used to set the owner/group for the extracted files. Otherwise, the named values from the tarfile are used. + The *filter* argument, which was added in Python 3.10.12, specifies how + ``members`` are modified or rejected before extraction. + See :ref:`tarfile-extraction-filter` for details. + It is recommended to set this explicitly depending on which *tar* features + you need to support. + .. warning:: Never extract archives from untrusted sources without prior inspection. @@ -408,14 +443,20 @@ that have absolute filenames starting with ``"/"`` or filenames with two dots ``".."``. + Set ``filter='data'`` to prevent the most dangerous security issues, + and read the :ref:`tarfile-extraction-filter` section for details. + .. versionchanged:: 3.5 Added the *numeric_owner* parameter. .. versionchanged:: 3.6 The *path* parameter accepts a :term:`path-like object`. + .. versionchanged:: 3.10.12 + Added the *filter* parameter. + -.. method:: TarFile.extract(member, path="", set_attrs=True, *, numeric_owner=False) +.. method:: TarFile.extract(member, path="", set_attrs=True, *, numeric_owner=False, filter=None) Extract a member from the archive to the current working directory, using its full name. Its file information is extracted as accurately as possible. *member* @@ -423,9 +464,8 @@ directory using *path*. *path* may be a :term:`path-like object`. File attributes (owner, mtime, mode) are set unless *set_attrs* is false. - If *numeric_owner* is :const:`True`, the uid and gid numbers from the tarfile - are used to set the owner/group for the extracted files. Otherwise, the named - values from the tarfile are used. + The *numeric_owner* and *filter* arguments are the same as + for :meth:`extractall`. .. note:: @@ -436,6 +476,9 @@ See the warning for :meth:`extractall`. + Set ``filter='data'`` to prevent the most dangerous security issues, + and read the :ref:`tarfile-extraction-filter` section for details. + .. versionchanged:: 3.2 Added the *set_attrs* parameter. @@ -445,6 +488,9 @@ .. versionchanged:: 3.6 The *path* parameter accepts a :term:`path-like object`. + .. versionchanged:: 3.10.12 + Added the *filter* parameter. + .. method:: TarFile.extractfile(member) @@ -457,6 +503,57 @@ .. versionchanged:: 3.3 Return an :class:`io.BufferedReader` object. +.. attribute:: TarFile.errorlevel + :type: int + + If *errorlevel* is ``0``, errors are ignored when using :meth:`TarFile.extract` + and :meth:`TarFile.extractall`. + Nevertheless, they appear as error messages in the debug output when + *debug* is greater than 0. 
+ If ``1`` (the default), all *fatal* errors are raised as :exc:`OSError` or + :exc:`FilterError` exceptions. If ``2``, all *non-fatal* errors are raised + as :exc:`TarError` exceptions as well. + + Some exceptions, e.g. ones caused by wrong argument types or data + corruption, are always raised. + + Custom :ref:`extraction filters ` + should raise :exc:`FilterError` for *fatal* errors + and :exc:`ExtractError` for *non-fatal* ones. + + Note that when an exception is raised, the archive may be partially + extracted. It is the user’s responsibility to clean up. + +.. attribute:: TarFile.extraction_filter + + .. versionadded:: 3.10.12 + + The :ref:`extraction filter ` used + as a default for the *filter* argument of :meth:`~TarFile.extract` + and :meth:`~TarFile.extractall`. + + The attribute may be ``None`` or a callable. + String names are not allowed for this attribute, unlike the *filter* + argument to :meth:`~TarFile.extract`. + + If ``extraction_filter`` is ``None`` (the default), + calling an extraction method without a *filter* argument will + use the :func:`fully_trusted ` filter for + compatibility with previous Python versions. + + In Python 3.12+, leaving ``extraction_filter=None`` will emit a + ``DeprecationWarning``. + + In Python 3.14+, leaving ``extraction_filter=None`` will cause + extraction methods to use the :func:`data ` filter by default. + + The attribute may be set on instances or overridden in subclasses. + It also is possible to set it on the ``TarFile`` class itself to set a + global default, although, since it affects all uses of *tarfile*, + it is best practice to only do so in top-level applications or + :mod:`site configuration `. + To set a global default this way, a filter function needs to be wrapped in + :func:`staticmethod()` to prevent injection of a ``self`` argument. .. method:: TarFile.add(name, arcname=None, recursive=True, *, filter=None) @@ -532,7 +629,27 @@ It does *not* contain the file's data itself. :class:`TarInfo` objects are returned by :class:`TarFile`'s methods -:meth:`getmember`, :meth:`getmembers` and :meth:`gettarinfo`. +:meth:`~TarFile.getmember`, :meth:`~TarFile.getmembers` and +:meth:`~TarFile.gettarinfo`. + +Modifying the objects returned by :meth:`~!TarFile.getmember` or +:meth:`~!TarFile.getmembers` will affect all subsequent +operations on the archive. +For cases where this is unwanted, you can use :mod:`copy.copy() ` or +call the :meth:`~TarInfo.replace` method to create a modified copy in one step. + +Several attributes can be set to ``None`` to indicate that a piece of metadata +is unused or unknown. +Different :class:`TarInfo` methods handle ``None`` differently: + +- The :meth:`~TarFile.extract` or :meth:`~TarFile.extractall` methods will + ignore the corresponding metadata, leaving it set to a default. +- :meth:`~TarFile.addfile` will fail. +- :meth:`~TarFile.list` will print a placeholder string. + + +.. versionchanged:: 3.10.12 + Added :meth:`~TarInfo.replace` and handling of ``None``. .. class:: TarInfo(name="") @@ -566,24 +683,39 @@ .. attribute:: TarInfo.name + :type: str Name of the archive member. .. attribute:: TarInfo.size + :type: int Size in bytes. .. attribute:: TarInfo.mtime + :type: int | float - Time of last modification. + Time of last modification in seconds since the :ref:`epoch `, + as in :attr:`os.stat_result.st_mtime`. + .. versionchanged:: 3.10.12 + + Can be set to ``None`` for :meth:`~TarFile.extract` and + :meth:`~TarFile.extractall`, causing extraction to skip applying this + attribute. .. 
attribute:: TarInfo.mode + :type: int + + Permission bits, as for :func:`os.chmod`. - Permission bits. + .. versionchanged:: 3.10.12 + Can be set to ``None`` for :meth:`~TarFile.extract` and + :meth:`~TarFile.extractall`, causing extraction to skip applying this + attribute. .. attribute:: TarInfo.type @@ -595,35 +727,76 @@ .. attribute:: TarInfo.linkname + :type: str Name of the target file name, which is only present in :class:`TarInfo` objects of type :const:`LNKTYPE` and :const:`SYMTYPE`. .. attribute:: TarInfo.uid + :type: int User ID of the user who originally stored this member. + .. versionchanged:: 3.10.12 + + Can be set to ``None`` for :meth:`~TarFile.extract` and + :meth:`~TarFile.extractall`, causing extraction to skip applying this + attribute. .. attribute:: TarInfo.gid + :type: int Group ID of the user who originally stored this member. + .. versionchanged:: 3.10.12 + + Can be set to ``None`` for :meth:`~TarFile.extract` and + :meth:`~TarFile.extractall`, causing extraction to skip applying this + attribute. .. attribute:: TarInfo.uname + :type: str User name. + .. versionchanged:: 3.10.12 + + Can be set to ``None`` for :meth:`~TarFile.extract` and + :meth:`~TarFile.extractall`, causing extraction to skip applying this + attribute. .. attribute:: TarInfo.gname + :type: str Group name. + .. versionchanged:: 3.10.12 + + Can be set to ``None`` for :meth:`~TarFile.extract` and + :meth:`~TarFile.extractall`, causing extraction to skip applying this + attribute. .. attribute:: TarInfo.pax_headers + :type: dict A dictionary containing key-value pairs of an associated pax extended header. +.. method:: TarInfo.replace(name=..., mtime=..., mode=..., linkname=..., + uid=..., gid=..., uname=..., gname=..., + deep=True) + + .. versionadded:: 3.10.12 + + Return a *new* copy of the :class:`!TarInfo` object with the given attributes + changed. For example, to return a ``TarInfo`` with the group name set to + ``'staff'``, use:: + + new_tarinfo = old_tarinfo.replace(gname='staff') + + By default, a deep copy is made. + If *deep* is false, the copy is shallow, i.e. ``pax_headers`` + and any custom attributes are shared with the original ``TarInfo`` object. A :class:`TarInfo` object also provides some convenient query methods: @@ -673,9 +846,259 @@ Return :const:`True` if it is one of character device, block device or FIFO. +.. _tarfile-extraction-filter: + +Extraction filters +------------------ + +.. versionadded:: 3.10.12 + +The *tar* format is designed to capture all details of a UNIX-like filesystem, +which makes it very powerful. +Unfortunately, the features make it easy to create tar files that have +unintended -- and possibly malicious -- effects when extracted. +For example, extracting a tar file can overwrite arbitrary files in various +ways (e.g. by using absolute paths, ``..`` path components, or symlinks that +affect later members). + +In most cases, the full functionality is not needed. +Therefore, *tarfile* supports extraction filters: a mechanism to limit +functionality, and thus mitigate some of the security issues. + +.. seealso:: + + :pep:`706` + Contains further motivation and rationale behind the design. + +The *filter* argument to :meth:`TarFile.extract` or :meth:`~TarFile.extractall` +can be: + +* the string ``'fully_trusted'``: Honor all metadata as specified in the + archive. + Should be used if the user trusts the archive completely, or implements + their own complex verification. + +* the string ``'tar'``: Honor most *tar*-specific features (i.e. 
features of + UNIX-like filesystems), but block features that are very likely to be + surprising or malicious. See :func:`tar_filter` for details. + +* the string ``'data'``: Ignore or block most features specific to UNIX-like + filesystems. Intended for extracting cross-platform data archives. + See :func:`data_filter` for details. + +* ``None`` (default): Use :attr:`TarFile.extraction_filter`. + + If that is also ``None`` (the default), the ``'fully_trusted'`` + filter will be used (for compatibility with earlier versions of Python). + + In Python 3.12, the default will emit a ``DeprecationWarning``. + + In Python 3.14, the ``'data'`` filter will become the default instead. + It's possible to switch earlier; see :attr:`TarFile.extraction_filter`. + +* A callable which will be called for each extracted member with a + :ref:`TarInfo ` describing the member and the destination + path to where the archive is extracted (i.e. the same path is used for all + members):: + + filter(/, member: TarInfo, path: str) -> TarInfo | None + + The callable is called just before each member is extracted, so it can + take the current state of the disk into account. + It can: + + - return a :class:`TarInfo` object which will be used instead of the metadata + in the archive, or + - return ``None``, in which case the member will be skipped, or + - raise an exception to abort the operation or skip the member, + depending on :attr:`~TarFile.errorlevel`. + Note that when extraction is aborted, :meth:`~TarFile.extractall` may leave + the archive partially extracted. It does not attempt to clean up. + +Default named filters +~~~~~~~~~~~~~~~~~~~~~ + +The pre-defined, named filters are available as functions, so they can be +reused in custom filters: + +.. function:: fully_trusted_filter(/, member, path) + + Return *member* unchanged. + + This implements the ``'fully_trusted'`` filter. + +.. function:: tar_filter(/, member, path) + + Implements the ``'tar'`` filter. + + - Strip leading slashes (``/`` and :attr:`os.sep`) from filenames. + - :ref:`Refuse ` to extract files with absolute + paths (in case the name is absolute + even after stripping slashes, e.g. ``C:/foo`` on Windows). + This raises :class:`~tarfile.AbsolutePathError`. + - :ref:`Refuse ` to extract files whose absolute + path (after following symlinks) would end up outside the destination. + This raises :class:`~tarfile.OutsideDestinationError`. + - Clear high mode bits (setuid, setgid, sticky) and group/other write bits + (:attr:`~stat.S_IWGRP`|:attr:`~stat.S_IWOTH`). + + Return the modified ``TarInfo`` member. + +.. function:: data_filter(/, member, path) + + Implements the ``'data'`` filter. + In addition to what ``tar_filter`` does: + + - :ref:`Refuse ` to extract links (hard or soft) + that link to absolute paths, or ones that link outside the destination. + + This raises :class:`~tarfile.AbsoluteLinkError` or + :class:`~tarfile.LinkOutsideDestinationError`. + + Note that such files are refused even on platforms that do not support + symbolic links. + + - :ref:`Refuse ` to extract device files + (including pipes). + This raises :class:`~tarfile.SpecialFileError`. + + - For regular files, including hard links: + + - Set the owner read and write permissions + (:attr:`~stat.S_IRUSR`|:attr:`~stat.S_IWUSR`). + - Remove the group & other executable permission + (:attr:`~stat.S_IXGRP`|:attr:`~stat.S_IXOTH`) + if the owner doesn’t have it (:attr:`~stat.S_IXUSR`). 
+ + - For other files (directories), set ``mode`` to ``None``, so + that extraction methods skip applying permission bits. + - Set user and group info (``uid``, ``gid``, ``uname``, ``gname``) + to ``None``, so that extraction methods skip setting it. + + Return the modified ``TarInfo`` member. + + +.. _tarfile-extraction-refuse: + +Filter errors +~~~~~~~~~~~~~ + +When a filter refuses to extract a file, it will raise an appropriate exception, +a subclass of :class:`~tarfile.FilterError`. +This will abort the extraction if :attr:`TarFile.errorlevel` is 1 or more. +With ``errorlevel=0`` the error will be logged and the member will be skipped, +but extraction will continue. + + +Hints for further verification +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Even with ``filter='data'``, *tarfile* is not suited for extracting untrusted +files without prior inspection. +Among other issues, the pre-defined filters do not prevent denial-of-service +attacks. Users should do additional checks. + +Here is an incomplete list of things to consider: + +* Extract to a :func:`new temporary directory ` + to prevent e.g. exploiting pre-existing links, and to make it easier to + clean up after a failed extraction. +* When working with untrusted data, use external (e.g. OS-level) limits on + disk, memory and CPU usage. +* Check filenames against an allow-list of characters + (to filter out control characters, confusables, foreign path separators, + etc.). +* Check that filenames have expected extensions (discouraging files that + execute when you “click on them”, or extension-less files like Windows special device names). +* Limit the number of extracted files, total size of extracted data, + filename length (including symlink length), and size of individual files. +* Check for files that would be shadowed on case-insensitive filesystems. + +Also note that: + +* Tar files may contain multiple versions of the same file. + Later ones are expected to overwrite any earlier ones. + This feature is crucial to allow updating tape archives, but can be abused + maliciously. +* *tarfile* does not protect against issues with “live” data, + e.g. an attacker tinkering with the destination (or source) directory while + extraction (or archiving) is in progress. + + +Supporting older Python versions +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Extraction filters were added to Python 3.12, and are backported to older +versions as security updates. +To check whether the feature is available, use e.g. +``hasattr(tarfile, 'data_filter')`` rather than checking the Python version. + +The following examples show how to support Python versions with and without +the feature. +Note that setting ``extraction_filter`` will affect any subsequent operations. 
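Before the per-version snippets below, a sketch of the class-level default described for ``TarFile.extraction_filter`` earlier in this file; wrapping the filter in :func:`staticmethod` keeps it from being bound as a method, and such a global opt-in is best reserved for top-level applications or site configuration::

    import tarfile

    # Opt in to the future 'data' default for every TarFile in this process.
    # staticmethod() prevents the function from being bound as a method.
    tarfile.TarFile.extraction_filter = staticmethod(tarfile.data_filter)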
+ +* Fully trusted archive:: + + my_tarfile.extraction_filter = (lambda member, path: member) + my_tarfile.extractall() + +* Use the ``'data'`` filter if available, but revert to Python 3.11 behavior + (``'fully_trusted'``) if this feature is not available:: + + my_tarfile.extraction_filter = getattr(tarfile, 'data_filter', + (lambda member, path: member)) + my_tarfile.extractall() + +* Use the ``'data'`` filter; *fail* if it is not available:: + + my_tarfile.extractall(filter=tarfile.data_filter) + + or:: + + my_tarfile.extraction_filter = tarfile.data_filter + my_tarfile.extractall() + +* Use the ``'data'`` filter; *warn* if it is not available:: + + if hasattr(tarfile, 'data_filter'): + my_tarfile.extractall(filter='data') + else: + # remove this when no longer needed + warn_the_user('Extracting may be unsafe; consider updating Python') + my_tarfile.extractall() + + +Stateful extraction filter example +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +While *tarfile*'s extraction methods take a simple *filter* callable, +custom filters may be more complex objects with an internal state. +It may be useful to write these as context managers, to be used like this:: + + with StatefulFilter() as filter_func: + tar.extractall(path, filter=filter_func) + +Such a filter can be written as, for example:: + + class StatefulFilter: + def __init__(self): + self.file_count = 0 + + def __enter__(self): + return self + + def __call__(self, member, path): + self.file_count += 1 + return member + + def __exit__(self, *exc_info): + print(f'{self.file_count} files extracted') + + .. _tarfile-commandline: .. program:: tarfile + Command-Line Interface ---------------------- @@ -745,6 +1168,15 @@ Verbose output. +.. cmdoption:: --filter + + Specifies the *filter* for ``--extract``. + See :ref:`tarfile-extraction-filter` for details. + Only string names are accepted (that is, ``fully_trusted``, ``tar``, + and ``data``). + + .. versionadded:: 3.10.12 + .. _tar-examples: Examples diff -Nru python3.10-3.10.11/Doc/library/traceback.rst python3.10-3.10.12/Doc/library/traceback.rst --- python3.10-3.10.11/Doc/library/traceback.rst 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Doc/library/traceback.rst 2023-06-06 22:30:33.000000000 +0000 @@ -259,6 +259,13 @@ For syntax errors - the line number where the error occurred. + .. attribute:: end_lineno + + For syntax errors - the end line number where the error occurred. + Can be ``None`` if not present. + + .. versionadded:: 3.10 + .. attribute:: text For syntax errors - the text where the error occurred. @@ -267,6 +274,13 @@ For syntax errors - the offset into the text where the error occurred. + .. attribute:: end_offset + + For syntax errors - the end offset into the text where the error occurred. + Can be ``None`` if not present. + + .. versionadded:: 3.10 + .. attribute:: msg For syntax errors - the compiler error message. diff -Nru python3.10-3.10.11/Doc/library/urllib.parse.rst python3.10-3.10.12/Doc/library/urllib.parse.rst --- python3.10-3.10.11/Doc/library/urllib.parse.rst 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Doc/library/urllib.parse.rst 2023-06-06 22:30:33.000000000 +0000 @@ -159,6 +159,10 @@ ParseResult(scheme='http', netloc='www.cwi.nl:80', path='/%7Eguido/Python.html', params='', query='', fragment='') + .. warning:: + + :func:`urlparse` does not perform validation. See :ref:`URL parsing + security ` for details. .. versionchanged:: 3.2 Added IPv6 URL parsing capabilities. 
@@ -324,8 +328,14 @@ ``#``, ``@``, or ``:`` will raise a :exc:`ValueError`. If the URL is decomposed before parsing, no error will be raised. - Following the `WHATWG spec`_ that updates RFC 3986, ASCII newline - ``\n``, ``\r`` and tab ``\t`` characters are stripped from the URL. + Following some of the `WHATWG spec`_ that updates RFC 3986, leading C0 + control and space characters are stripped from the URL. ``\n``, + ``\r`` and tab ``\t`` characters are removed from the URL at any position. + + .. warning:: + + :func:`urlsplit` does not perform validation. See :ref:`URL parsing + security ` for details. .. versionchanged:: 3.6 Out-of-range port numbers now raise :exc:`ValueError`, instead of @@ -338,6 +348,9 @@ .. versionchanged:: 3.10 ASCII newline and tab characters are stripped from the URL. + .. versionchanged:: 3.10.12 + Leading WHATWG C0 control and space characters are stripped from the URL. + .. _WHATWG spec: https://url.spec.whatwg.org/#concept-basic-url-parser .. function:: urlunsplit(parts) @@ -414,6 +427,27 @@ or ``scheme://host/path``). If *url* is not a wrapped URL, it is returned without changes. +.. _url-parsing-security: + +URL parsing security +-------------------- + +The :func:`urlsplit` and :func:`urlparse` APIs do not perform **validation** of +inputs. They may not raise errors on inputs that other applications consider +invalid. They may also succeed on some inputs that might not be considered +URLs elsewhere. Their purpose is for practical functionality rather than +purity. + +Instead of raising an exception on unusual input, they may instead return some +component parts as empty strings. Or components may contain more than perhaps +they should. + +We recommend that users of these APIs where the values may be used anywhere +with security implications code defensively. Do some verification within your +code before trusting a returned component part. Does that ``scheme`` make +sense? Is that a sensible ``path``? Is there anything strange about that +``hostname``? etc. + .. _parsing-ascii-encoded-bytes: Parsing ASCII Encoded Bytes diff -Nru python3.10-3.10.11/Doc/whatsnew/3.10.rst python3.10-3.10.12/Doc/whatsnew/3.10.rst --- python3.10-3.10.11/Doc/whatsnew/3.10.rst 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Doc/whatsnew/3.10.rst 2023-06-06 22:30:33.000000000 +0000 @@ -2332,3 +2332,19 @@ text, it will warn and act as if a match was not found (or for test commands, as if the test failed). (Contributed by Petr Viktorin in :gh:`98966`.) + +Notable Changes in 3.10.12 +========================== + +tarfile +------- + +* The extraction methods in :mod:`tarfile`, and :func:`shutil.unpack_archive`, + have a new a *filter* argument that allows limiting tar features than may be + surprising or dangerous, such as creating files outside the destination + directory. + See :ref:`tarfile-extraction-filter` for details. + In Python 3.12, use without the *filter* argument will show a + :exc:`DeprecationWarning`. + In Python 3.14, the default will switch to ``'data'``. + (Contributed by Petr Viktorin in :pep:`706`.) 
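A brief illustration of the defensive checks recommended in the URL parsing security section above; :func:`urllib.parse.urlsplit` does not validate its input, so callers should verify the components they rely on. The URL and the accepted schemes here are examples only::

    from urllib.parse import urlsplit

    parts = urlsplit("https://example.com/download?id=42")
    # urlsplit() will not raise on odd input; check the pieces you use.
    if parts.scheme not in ("http", "https"):
        raise ValueError(f"unexpected scheme: {parts.scheme!r}")
    if not parts.hostname:
        raise ValueError("URL has no host")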
diff -Nru python3.10-3.10.11/Include/patchlevel.h python3.10-3.10.12/Include/patchlevel.h --- python3.10-3.10.11/Include/patchlevel.h 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Include/patchlevel.h 2023-06-06 22:30:33.000000000 +0000 @@ -18,12 +18,12 @@ /*--start constants--*/ #define PY_MAJOR_VERSION 3 #define PY_MINOR_VERSION 10 -#define PY_MICRO_VERSION 11 +#define PY_MICRO_VERSION 12 #define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_FINAL #define PY_RELEASE_SERIAL 0 /* Version as a string */ -#define PY_VERSION "3.10.11" +#define PY_VERSION "3.10.12" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. diff -Nru python3.10-3.10.11/Lib/cProfile.py python3.10-3.10.12/Lib/cProfile.py --- python3.10-3.10.11/Lib/cProfile.py 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Lib/cProfile.py 2023-06-06 22:30:33.000000000 +0000 @@ -7,6 +7,7 @@ __all__ = ["run", "runctx", "Profile"] import _lsprof +import io import profile as _pyprofile # ____________________________________________________________ @@ -167,7 +168,7 @@ else: progname = args[0] sys.path.insert(0, os.path.dirname(progname)) - with open(progname, 'rb') as fp: + with io.open_code(progname) as fp: code = compile(fp.read(), progname, 'exec') globs = { '__file__': progname, diff -Nru python3.10-3.10.11/Lib/http/server.py python3.10-3.10.12/Lib/http/server.py --- python3.10-3.10.11/Lib/http/server.py 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Lib/http/server.py 2023-06-06 22:30:33.000000000 +0000 @@ -791,7 +791,7 @@ displaypath = urllib.parse.unquote(self.path, errors='surrogatepass') except UnicodeDecodeError: - displaypath = urllib.parse.unquote(path) + displaypath = urllib.parse.unquote(self.path) displaypath = html.escape(displaypath, quote=False) enc = sys.getfilesystemencoding() title = 'Directory listing for %s' % displaypath diff -Nru python3.10-3.10.11/Lib/profile.py python3.10-3.10.12/Lib/profile.py --- python3.10-3.10.11/Lib/profile.py 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Lib/profile.py 2023-06-06 22:30:33.000000000 +0000 @@ -24,6 +24,7 @@ # governing permissions and limitations under the License. +import io import sys import time import marshal @@ -587,7 +588,7 @@ else: progname = args[0] sys.path.insert(0, os.path.dirname(progname)) - with open(progname, 'rb') as fp: + with io.open_code(progname) as fp: code = compile(fp.read(), progname, 'exec') globs = { '__file__': progname, diff -Nru python3.10-3.10.11/Lib/pydoc_data/topics.py python3.10-3.10.12/Lib/pydoc_data/topics.py --- python3.10-3.10.11/Lib/pydoc_data/topics.py 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Lib/pydoc_data/topics.py 2023-06-06 22:30:33.000000000 +0000 @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Autogenerated by Sphinx on Tue Apr 4 22:56:51 2023 +# Autogenerated by Sphinx on Tue Jun 6 23:30:19 2023 topics = {'assert': 'The "assert" statement\n' '**********************\n' '\n' @@ -11729,7 +11729,7 @@ 'followed by\n' ' the string itself.\n' '\n' - 'str.rsplit(sep=None, maxsplit=-1)\n' + 'str.rsplit(sep=None, maxsplit=- 1)\n' '\n' ' Return a list of the words in the string, using *sep* ' 'as the\n' @@ -11770,7 +11770,7 @@ " >>> 'Monty Python'.removesuffix(' Python')\n" " 'Monty'\n" '\n' - 'str.split(sep=None, maxsplit=-1)\n' + 'str.split(sep=None, maxsplit=- 1)\n' '\n' ' Return a list of the words in the string, using *sep* ' 'as the\n' @@ -12802,7 +12802,7 @@ ' points. 
All the code points in the range "U+0000 - ' 'U+10FFFF"\n' ' can be represented in a string. Python doesn’t have a ' - 'char\n' + '"char"\n' ' type; instead, every code point in the string is ' 'represented\n' ' as a string object with length "1". The built-in ' diff -Nru python3.10-3.10.11/Lib/shutil.py python3.10-3.10.12/Lib/shutil.py --- python3.10-3.10.11/Lib/shutil.py 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Lib/shutil.py 2023-06-06 22:30:33.000000000 +0000 @@ -1222,7 +1222,7 @@ finally: zip.close() -def _unpack_tarfile(filename, extract_dir): +def _unpack_tarfile(filename, extract_dir, *, filter=None): """Unpack tar/tar.gz/tar.bz2/tar.xz `filename` to `extract_dir` """ import tarfile # late import for breaking circular dependency @@ -1232,7 +1232,7 @@ raise ReadError( "%s is not a compressed or uncompressed tar file" % filename) try: - tarobj.extractall(extract_dir) + tarobj.extractall(extract_dir, filter=filter) finally: tarobj.close() @@ -1265,7 +1265,7 @@ return name return None -def unpack_archive(filename, extract_dir=None, format=None): +def unpack_archive(filename, extract_dir=None, format=None, *, filter=None): """Unpack an archive. `filename` is the name of the archive. @@ -1279,6 +1279,9 @@ was registered for that extension. In case none is found, a ValueError is raised. + + If `filter` is given, it is passed to the underlying + extraction function. """ sys.audit("shutil.unpack_archive", filename, extract_dir, format) @@ -1288,6 +1291,10 @@ extract_dir = os.fspath(extract_dir) filename = os.fspath(filename) + if filter is None: + filter_kwargs = {} + else: + filter_kwargs = {'filter': filter} if format is not None: try: format_info = _UNPACK_FORMATS[format] @@ -1295,7 +1302,7 @@ raise ValueError("Unknown unpack format '{0}'".format(format)) from None func = format_info[1] - func(filename, extract_dir, **dict(format_info[2])) + func(filename, extract_dir, **dict(format_info[2]), **filter_kwargs) else: # we need to look at the registered unpackers supported extensions format = _find_unpack_format(filename) @@ -1303,7 +1310,7 @@ raise ReadError("Unknown archive format '{0}'".format(filename)) func = _UNPACK_FORMATS[format][1] - kwargs = dict(_UNPACK_FORMATS[format][2]) + kwargs = dict(_UNPACK_FORMATS[format][2]) | filter_kwargs func(filename, extract_dir, **kwargs) diff -Nru python3.10-3.10.11/Lib/tarfile.py python3.10-3.10.12/Lib/tarfile.py --- python3.10-3.10.11/Lib/tarfile.py 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Lib/tarfile.py 2023-06-06 22:30:33.000000000 +0000 @@ -46,6 +46,7 @@ import struct import copy import re +import warnings try: import pwd @@ -71,6 +72,7 @@ "ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT", "DEFAULT_FORMAT", "open"] + #--------------------------------------------------------- # tar constants #--------------------------------------------------------- @@ -158,6 +160,8 @@ def stn(s, length, encoding, errors): """Convert a string to a null-terminated bytes object. 
""" + if s is None: + raise ValueError("metadata cannot contain None") s = s.encode(encoding, errors) return s[:length] + (length - len(s)) * NUL @@ -709,9 +713,127 @@ super().__init__(fileobj) #class ExFileObject + +#----------------------------- +# extraction filters (PEP 706) +#----------------------------- + +class FilterError(TarError): + pass + +class AbsolutePathError(FilterError): + def __init__(self, tarinfo): + self.tarinfo = tarinfo + super().__init__(f'member {tarinfo.name!r} has an absolute path') + +class OutsideDestinationError(FilterError): + def __init__(self, tarinfo, path): + self.tarinfo = tarinfo + self._path = path + super().__init__(f'{tarinfo.name!r} would be extracted to {path!r}, ' + + 'which is outside the destination') + +class SpecialFileError(FilterError): + def __init__(self, tarinfo): + self.tarinfo = tarinfo + super().__init__(f'{tarinfo.name!r} is a special file') + +class AbsoluteLinkError(FilterError): + def __init__(self, tarinfo): + self.tarinfo = tarinfo + super().__init__(f'{tarinfo.name!r} is a symlink to an absolute path') + +class LinkOutsideDestinationError(FilterError): + def __init__(self, tarinfo, path): + self.tarinfo = tarinfo + self._path = path + super().__init__(f'{tarinfo.name!r} would link to {path!r}, ' + + 'which is outside the destination') + +def _get_filtered_attrs(member, dest_path, for_data=True): + new_attrs = {} + name = member.name + dest_path = os.path.realpath(dest_path) + # Strip leading / (tar's directory separator) from filenames. + # Include os.sep (target OS directory separator) as well. + if name.startswith(('/', os.sep)): + name = new_attrs['name'] = member.path.lstrip('/' + os.sep) + if os.path.isabs(name): + # Path is absolute even after stripping. + # For example, 'C:/foo' on Windows. 
+ raise AbsolutePathError(member) + # Ensure we stay in the destination + target_path = os.path.realpath(os.path.join(dest_path, name)) + if os.path.commonpath([target_path, dest_path]) != dest_path: + raise OutsideDestinationError(member, target_path) + # Limit permissions (no high bits, and go-w) + mode = member.mode + if mode is not None: + # Strip high bits & group/other write bits + mode = mode & 0o755 + if for_data: + # For data, handle permissions & file types + if member.isreg() or member.islnk(): + if not mode & 0o100: + # Clear executable bits if not executable by user + mode &= ~0o111 + # Ensure owner can read & write + mode |= 0o600 + elif member.isdir() or member.issym(): + # Ignore mode for directories & symlinks + mode = None + else: + # Reject special files + raise SpecialFileError(member) + if mode != member.mode: + new_attrs['mode'] = mode + if for_data: + # Ignore ownership for 'data' + if member.uid is not None: + new_attrs['uid'] = None + if member.gid is not None: + new_attrs['gid'] = None + if member.uname is not None: + new_attrs['uname'] = None + if member.gname is not None: + new_attrs['gname'] = None + # Check link destination for 'data' + if member.islnk() or member.issym(): + if os.path.isabs(member.linkname): + raise AbsoluteLinkError(member) + target_path = os.path.realpath(os.path.join(dest_path, member.linkname)) + if os.path.commonpath([target_path, dest_path]) != dest_path: + raise LinkOutsideDestinationError(member, target_path) + return new_attrs + +def fully_trusted_filter(member, dest_path): + return member + +def tar_filter(member, dest_path): + new_attrs = _get_filtered_attrs(member, dest_path, False) + if new_attrs: + return member.replace(**new_attrs, deep=False) + return member + +def data_filter(member, dest_path): + new_attrs = _get_filtered_attrs(member, dest_path, True) + if new_attrs: + return member.replace(**new_attrs, deep=False) + return member + +_NAMED_FILTERS = { + "fully_trusted": fully_trusted_filter, + "tar": tar_filter, + "data": data_filter, +} + #------------------ # Exported Classes #------------------ + +# Sentinel for replace() defaults, meaning "don't change the attribute" +_KEEP = object() + class TarInfo(object): """Informational class which holds the details about an archive member given by a tar header block. @@ -792,12 +914,44 @@ def __repr__(self): return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self)) + def replace(self, *, + name=_KEEP, mtime=_KEEP, mode=_KEEP, linkname=_KEEP, + uid=_KEEP, gid=_KEEP, uname=_KEEP, gname=_KEEP, + deep=True, _KEEP=_KEEP): + """Return a deep copy of self with the given attributes replaced. + """ + if deep: + result = copy.deepcopy(self) + else: + result = copy.copy(self) + if name is not _KEEP: + result.name = name + if mtime is not _KEEP: + result.mtime = mtime + if mode is not _KEEP: + result.mode = mode + if linkname is not _KEEP: + result.linkname = linkname + if uid is not _KEEP: + result.uid = uid + if gid is not _KEEP: + result.gid = gid + if uname is not _KEEP: + result.uname = uname + if gname is not _KEEP: + result.gname = gname + return result + def get_info(self): """Return the TarInfo's attributes as a dictionary. """ + if self.mode is None: + mode = None + else: + mode = self.mode & 0o7777 info = { "name": self.name, - "mode": self.mode & 0o7777, + "mode": mode, "uid": self.uid, "gid": self.gid, "size": self.size, @@ -820,6 +974,9 @@ """Return a tar header as a string of 512 byte blocks. 
""" info = self.get_info() + for name, value in info.items(): + if value is None: + raise ValueError("%s may not be None" % name) if format == USTAR_FORMAT: return self.create_ustar_header(info, encoding, errors) @@ -950,6 +1107,12 @@ devmajor = stn("", 8, encoding, errors) devminor = stn("", 8, encoding, errors) + # None values in metadata should cause ValueError. + # itn()/stn() do this for all fields except type. + filetype = info.get("type", REGTYPE) + if filetype is None: + raise ValueError("TarInfo.type must not be None") + parts = [ stn(info.get("name", ""), 100, encoding, errors), itn(info.get("mode", 0) & 0o7777, 8, format), @@ -958,7 +1121,7 @@ itn(info.get("size", 0), 12, format), itn(info.get("mtime", 0), 12, format), b" ", # checksum field - info.get("type", REGTYPE), + filetype, stn(info.get("linkname", ""), 100, encoding, errors), info.get("magic", POSIX_MAGIC), stn(info.get("uname", ""), 32, encoding, errors), @@ -1468,6 +1631,8 @@ fileobject = ExFileObject # The file-object for extractfile(). + extraction_filter = None # The default filter for extraction. + def __init__(self, name=None, mode="r", fileobj=None, format=None, tarinfo=None, dereference=None, ignore_zeros=None, encoding=None, errors="surrogateescape", pax_headers=None, debug=None, @@ -1940,7 +2105,10 @@ members = self for tarinfo in members: if verbose: - _safe_print(stat.filemode(tarinfo.mode)) + if tarinfo.mode is None: + _safe_print("??????????") + else: + _safe_print(stat.filemode(tarinfo.mode)) _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid, tarinfo.gname or tarinfo.gid)) if tarinfo.ischr() or tarinfo.isblk(): @@ -1948,8 +2116,11 @@ ("%d,%d" % (tarinfo.devmajor, tarinfo.devminor))) else: _safe_print("%10d" % tarinfo.size) - _safe_print("%d-%02d-%02d %02d:%02d:%02d" \ - % time.localtime(tarinfo.mtime)[:6]) + if tarinfo.mtime is None: + _safe_print("????-??-?? ??:??:??") + else: + _safe_print("%d-%02d-%02d %02d:%02d:%02d" \ + % time.localtime(tarinfo.mtime)[:6]) _safe_print(tarinfo.name + ("/" if tarinfo.isdir() else "")) @@ -2036,32 +2207,58 @@ self.members.append(tarinfo) - def extractall(self, path=".", members=None, *, numeric_owner=False): + def _get_filter_function(self, filter): + if filter is None: + filter = self.extraction_filter + if filter is None: + return fully_trusted_filter + if isinstance(filter, str): + raise TypeError( + 'String names are not supported for ' + + 'TarFile.extraction_filter. Use a function such as ' + + 'tarfile.data_filter directly.') + return filter + if callable(filter): + return filter + try: + return _NAMED_FILTERS[filter] + except KeyError: + raise ValueError(f"filter {filter!r} not found") from None + + def extractall(self, path=".", members=None, *, numeric_owner=False, + filter=None): """Extract all members from the archive to the current working directory and set owner, modification time and permissions on directories afterwards. `path' specifies a different directory to extract to. `members' is optional and must be a subset of the list returned by getmembers(). If `numeric_owner` is True, only the numbers for user/group names are used and not the names. + + The `filter` function will be called on each member just + before extraction. + It can return a changed TarInfo or None to skip the member. + String names of common filters are accepted. 
""" directories = [] + filter_function = self._get_filter_function(filter) if members is None: members = self - for tarinfo in members: + for member in members: + tarinfo = self._get_extract_tarinfo(member, filter_function, path) + if tarinfo is None: + continue if tarinfo.isdir(): - # Extract directories with a safe mode. + # For directories, delay setting attributes until later, + # since permissions can interfere with extraction and + # extracting contents can reset mtime. directories.append(tarinfo) - tarinfo = copy.copy(tarinfo) - tarinfo.mode = 0o700 - # Do not set_attrs directories, as we will do that further down - self.extract(tarinfo, path, set_attrs=not tarinfo.isdir(), - numeric_owner=numeric_owner) + self._extract_one(tarinfo, path, set_attrs=not tarinfo.isdir(), + numeric_owner=numeric_owner) # Reverse sort directories. - directories.sort(key=lambda a: a.name) - directories.reverse() + directories.sort(key=lambda a: a.name, reverse=True) # Set correct owner, mtime and filemode on directories. for tarinfo in directories: @@ -2071,12 +2268,10 @@ self.utime(tarinfo, dirpath) self.chmod(tarinfo, dirpath) except ExtractError as e: - if self.errorlevel > 1: - raise - else: - self._dbg(1, "tarfile: %s" % e) + self._handle_nonfatal_error(e) - def extract(self, member, path="", set_attrs=True, *, numeric_owner=False): + def extract(self, member, path="", set_attrs=True, *, numeric_owner=False, + filter=None): """Extract a member from the archive to the current working directory, using its full name. Its file information is extracted as accurately as possible. `member' may be a filename or a TarInfo object. You can @@ -2084,35 +2279,70 @@ mtime, mode) are set unless `set_attrs' is False. If `numeric_owner` is True, only the numbers for user/group names are used and not the names. + + The `filter` function will be called before extraction. + It can return a changed TarInfo or None to skip the member. + String names of common filters are accepted. """ - self._check("r") + filter_function = self._get_filter_function(filter) + tarinfo = self._get_extract_tarinfo(member, filter_function, path) + if tarinfo is not None: + self._extract_one(tarinfo, path, set_attrs, numeric_owner) + def _get_extract_tarinfo(self, member, filter_function, path): + """Get filtered TarInfo (or None) from member, which might be a str""" if isinstance(member, str): tarinfo = self.getmember(member) else: tarinfo = member + unfiltered = tarinfo + try: + tarinfo = filter_function(tarinfo, path) + except (OSError, FilterError) as e: + self._handle_fatal_error(e) + except ExtractError as e: + self._handle_nonfatal_error(e) + if tarinfo is None: + self._dbg(2, "tarfile: Excluded %r" % unfiltered.name) + return None # Prepare the link target for makelink(). 
if tarinfo.islnk(): + tarinfo = copy.copy(tarinfo) tarinfo._link_target = os.path.join(path, tarinfo.linkname) + return tarinfo + + def _extract_one(self, tarinfo, path, set_attrs, numeric_owner): + """Extract from filtered tarinfo to disk""" + self._check("r") try: self._extract_member(tarinfo, os.path.join(path, tarinfo.name), set_attrs=set_attrs, numeric_owner=numeric_owner) except OSError as e: - if self.errorlevel > 0: - raise - else: - if e.filename is None: - self._dbg(1, "tarfile: %s" % e.strerror) - else: - self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename)) + self._handle_fatal_error(e) except ExtractError as e: - if self.errorlevel > 1: - raise + self._handle_nonfatal_error(e) + + def _handle_nonfatal_error(self, e): + """Handle non-fatal error (ExtractError) according to errorlevel""" + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) + + def _handle_fatal_error(self, e): + """Handle "fatal" error according to self.errorlevel""" + if self.errorlevel > 0: + raise + elif isinstance(e, OSError): + if e.filename is None: + self._dbg(1, "tarfile: %s" % e.strerror) else: - self._dbg(1, "tarfile: %s" % e) + self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename)) + else: + self._dbg(1, "tarfile: %s %s" % (type(e).__name__, e)) def extractfile(self, member): """Extract a member from the archive as a file object. `member' may be @@ -2199,9 +2429,13 @@ """Make a directory called targetpath. """ try: - # Use a safe mode for the directory, the real mode is set - # later in _extract_member(). - os.mkdir(targetpath, 0o700) + if tarinfo.mode is None: + # Use the system's default mode + os.mkdir(targetpath) + else: + # Use a safe mode for the directory, the real mode is set + # later in _extract_member(). + os.mkdir(targetpath, 0o700) except FileExistsError: pass @@ -2244,6 +2478,9 @@ raise ExtractError("special devices not supported by system") mode = tarinfo.mode + if mode is None: + # Use mknod's default + mode = 0o600 if tarinfo.isblk(): mode |= stat.S_IFBLK else: @@ -2265,7 +2502,6 @@ os.unlink(targetpath) os.symlink(tarinfo.linkname, targetpath) else: - # See extract(). if os.path.exists(tarinfo._link_target): os.link(tarinfo._link_target, targetpath) else: @@ -2290,15 +2526,19 @@ u = tarinfo.uid if not numeric_owner: try: - if grp: + if grp and tarinfo.gname: g = grp.getgrnam(tarinfo.gname)[2] except KeyError: pass try: - if pwd: + if pwd and tarinfo.uname: u = pwd.getpwnam(tarinfo.uname)[2] except KeyError: pass + if g is None: + g = -1 + if u is None: + u = -1 try: if tarinfo.issym() and hasattr(os, "lchown"): os.lchown(targetpath, u, g) @@ -2310,6 +2550,8 @@ def chmod(self, tarinfo, targetpath): """Set file permissions of targetpath according to tarinfo. """ + if tarinfo.mode is None: + return try: os.chmod(targetpath, tarinfo.mode) except OSError as e: @@ -2318,10 +2560,13 @@ def utime(self, tarinfo, targetpath): """Set modification time of targetpath according to tarinfo. """ + mtime = tarinfo.mtime + if mtime is None: + return if not hasattr(os, 'utime'): return try: - os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime)) + os.utime(targetpath, (mtime, mtime)) except OSError as e: raise ExtractError("could not change modification time") from e @@ -2397,13 +2642,26 @@ members = self.getmembers() # Limit the member search list up to tarinfo. + skipping = False if tarinfo is not None: - members = members[:members.index(tarinfo)] + try: + index = members.index(tarinfo) + except ValueError: + # The given starting point might be a (modified) copy. 
+ # We'll later skip members until we find an equivalent. + skipping = True + else: + # Happy fast path + members = members[:index] if normalize: name = os.path.normpath(name) for member in reversed(members): + if skipping: + if tarinfo.offset == member.offset: + skipping = False + continue if normalize: member_name = os.path.normpath(member.name) else: @@ -2412,6 +2670,10 @@ if name == member_name: return member + if skipping: + # Starting point was not found + raise ValueError(tarinfo) + def _load(self): """Read through the entire archive file and look for readable members. @@ -2504,6 +2766,7 @@ #-------------------- # exported functions #-------------------- + def is_tarfile(name): """Return True if name points to a tar archive that we are able to handle, else return False. @@ -2530,6 +2793,10 @@ parser = argparse.ArgumentParser(description=description) parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Verbose output') + parser.add_argument('--filter', metavar='', + choices=_NAMED_FILTERS, + help='Filter for extraction') + group = parser.add_mutually_exclusive_group(required=True) group.add_argument('-l', '--list', metavar='', help='Show listing of a tarfile') @@ -2541,8 +2808,12 @@ help='Create tarfile from sources') group.add_argument('-t', '--test', metavar='', help='Test if a tarfile is valid') + args = parser.parse_args() + if args.filter and args.extract is None: + parser.exit(1, '--filter is only valid for extraction\n') + if args.test is not None: src = args.test if is_tarfile(src): @@ -2573,7 +2844,7 @@ if is_tarfile(src): with TarFile.open(src, 'r:*') as tf: - tf.extractall(path=curdir) + tf.extractall(path=curdir, filter=args.filter) if args.verbose: if curdir == '.': msg = '{!r} file is extracted.'.format(src) diff -Nru python3.10-3.10.11/Lib/test/test_httpservers.py python3.10-3.10.12/Lib/test/test_httpservers.py --- python3.10-3.10.11/Lib/test/test_httpservers.py 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Lib/test/test_httpservers.py 2023-06-06 22:30:33.000000000 +0000 @@ -417,6 +417,14 @@ self.check_status_and_reason(response, HTTPStatus.OK, data=os_helper.TESTFN_UNDECODABLE) + def test_undecodable_parameter(self): + # sanity check using a valid parameter + response = self.request(self.base_url + '/?x=123').read() + self.assertRegex(response, f'listing for {self.base_url}/\?x=123'.encode('latin1')) + # now the bogus encoding + response = self.request(self.base_url + '/?x=%bb').read() + self.assertRegex(response, f'listing for {self.base_url}/\?x=\xef\xbf\xbd'.encode('latin1')) + def test_get_dir_redirect_location_domain_injection_bug(self): """Ensure //evil.co/..%2f../../X does not put //evil.co/ in Location. 
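The tarfile.py hunks above add a `filter` keyword to TarFile.extract() and TarFile.extractall(). It accepts either the name of a built-in filter ('fully_trusted', 'tar', 'data') or a callable that takes (tarinfo, path) and returns a possibly modified TarInfo, or None to skip that member. A minimal usage sketch against the patched module follows; the archive name example.tar, the destination dest/ and the skip_symlinks policy are illustrative assumptions, not part of the patch.

import tarfile

# Built-in filter, selected by name: 'data' refuses absolute paths,
# links that escape the destination and special files, and limits
# the mode bits that get applied.
with tarfile.open("example.tar") as tar:
    tar.extractall(path="dest", filter="data")

# Custom filter with the same (tarinfo, path) signature as
# tarfile.data_filter; returning None skips the member.
def skip_symlinks(member, dest_path):
    if member.issym() or member.islnk():
        return None
    # Delegate the remaining safety checks to the stock 'data' filter.
    return tarfile.data_filter(member, dest_path)

with tarfile.open("example.tar") as tar:
    tar.extractall(path="dest", filter=skip_symlinks)

With the patch applied, shutil.unpack_archive() forwards the same filter keyword for tar formats, which is what the test_shutil.py changes below exercise.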
diff -Nru python3.10-3.10.11/Lib/test/test_shutil.py python3.10-3.10.12/Lib/test/test_shutil.py --- python3.10-3.10.11/Lib/test/test_shutil.py 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Lib/test/test_shutil.py 2023-06-06 22:30:33.000000000 +0000 @@ -32,6 +32,7 @@ from test import support from test.support import os_helper from test.support.os_helper import TESTFN, FakePath +from test.support import warnings_helper TESTFN2 = TESTFN + "2" TESTFN_SRC = TESTFN + "_SRC" @@ -1610,12 +1611,14 @@ ### shutil.unpack_archive - def check_unpack_archive(self, format): - self.check_unpack_archive_with_converter(format, lambda path: path) - self.check_unpack_archive_with_converter(format, pathlib.Path) - self.check_unpack_archive_with_converter(format, FakePath) + def check_unpack_archive(self, format, **kwargs): + self.check_unpack_archive_with_converter( + format, lambda path: path, **kwargs) + self.check_unpack_archive_with_converter( + format, pathlib.Path, **kwargs) + self.check_unpack_archive_with_converter(format, FakePath, **kwargs) - def check_unpack_archive_with_converter(self, format, converter): + def check_unpack_archive_with_converter(self, format, converter, **kwargs): root_dir, base_dir = self._create_files() expected = rlistdir(root_dir) expected.remove('outer') @@ -1625,36 +1628,47 @@ # let's try to unpack it now tmpdir2 = self.mkdtemp() - unpack_archive(converter(filename), converter(tmpdir2)) + unpack_archive(converter(filename), converter(tmpdir2), **kwargs) self.assertEqual(rlistdir(tmpdir2), expected) # and again, this time with the format specified tmpdir3 = self.mkdtemp() - unpack_archive(converter(filename), converter(tmpdir3), format=format) + unpack_archive(converter(filename), converter(tmpdir3), format=format, + **kwargs) self.assertEqual(rlistdir(tmpdir3), expected) - self.assertRaises(shutil.ReadError, unpack_archive, converter(TESTFN)) - self.assertRaises(ValueError, unpack_archive, converter(TESTFN), format='xxx') + with self.assertRaises(shutil.ReadError): + unpack_archive(converter(TESTFN), **kwargs) + with self.assertRaises(ValueError): + unpack_archive(converter(TESTFN), format='xxx', **kwargs) + + def check_unpack_tarball(self, format): + self.check_unpack_archive(format, filter='fully_trusted') + self.check_unpack_archive(format, filter='data') + with warnings_helper.check_no_warnings(self): + self.check_unpack_archive(format) def test_unpack_archive_tar(self): - self.check_unpack_archive('tar') + self.check_unpack_tarball('tar') @support.requires_zlib() def test_unpack_archive_gztar(self): - self.check_unpack_archive('gztar') + self.check_unpack_tarball('gztar') @support.requires_bz2() def test_unpack_archive_bztar(self): - self.check_unpack_archive('bztar') + self.check_unpack_tarball('bztar') @support.requires_lzma() @unittest.skipIf(AIX and not _maxdataOK(), "AIX MAXDATA must be 0x20000000 or larger") def test_unpack_archive_xztar(self): - self.check_unpack_archive('xztar') + self.check_unpack_tarball('xztar') @support.requires_zlib() def test_unpack_archive_zip(self): self.check_unpack_archive('zip') + with self.assertRaises(TypeError): + self.check_unpack_archive('zip', filter='data') def test_unpack_registry(self): diff -Nru python3.10-3.10.11/Lib/test/test_subprocess.py python3.10-3.10.12/Lib/test/test_subprocess.py --- python3.10-3.10.11/Lib/test/test_subprocess.py 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Lib/test/test_subprocess.py 2023-06-06 22:30:33.000000000 +0000 @@ -1,6 +1,7 @@ import unittest from unittest import 
mock from test import support +from test.support import check_sanitizer from test.support import import_helper from test.support import os_helper from test.support import warnings_helper @@ -774,6 +775,8 @@ @unittest.skipIf(sysconfig.get_config_var('Py_ENABLE_SHARED') == 1, 'The Python shared library cannot be loaded ' 'with an empty environment.') + @unittest.skipIf(check_sanitizer(address=True), + 'AddressSanitizer adds to the environment.') def test_empty_env(self): """Verify that env={} is as empty as possible.""" diff -Nru python3.10-3.10.11/Lib/test/test_tarfile.py python3.10-3.10.12/Lib/test/test_tarfile.py --- python3.10-3.10.11/Lib/test/test_tarfile.py 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Lib/test/test_tarfile.py 2023-06-06 22:30:33.000000000 +0000 @@ -5,6 +5,10 @@ from contextlib import contextmanager from random import Random import pathlib +import shutil +import re +import warnings +import stat import unittest import unittest.mock @@ -13,6 +17,7 @@ from test import support from test.support import os_helper from test.support import script_helper +from test.support import warnings_helper # Check for our compression modules. try: @@ -108,7 +113,7 @@ "regular file extraction failed") def test_fileobj_readlines(self): - self.tar.extract("ustar/regtype", TEMPDIR) + self.tar.extract("ustar/regtype", TEMPDIR, filter='data') tarinfo = self.tar.getmember("ustar/regtype") with open(os.path.join(TEMPDIR, "ustar/regtype"), "r") as fobj1: lines1 = fobj1.readlines() @@ -126,7 +131,7 @@ "fileobj.readlines() failed") def test_fileobj_iter(self): - self.tar.extract("ustar/regtype", TEMPDIR) + self.tar.extract("ustar/regtype", TEMPDIR, filter='data') tarinfo = self.tar.getmember("ustar/regtype") with open(os.path.join(TEMPDIR, "ustar/regtype"), "r") as fobj1: lines1 = fobj1.readlines() @@ -136,7 +141,8 @@ "fileobj.__iter__() failed") def test_fileobj_seek(self): - self.tar.extract("ustar/regtype", TEMPDIR) + self.tar.extract("ustar/regtype", TEMPDIR, + filter='data') with open(os.path.join(TEMPDIR, "ustar/regtype"), "rb") as fobj: data = fobj.read() @@ -455,7 +461,7 @@ t = tar.next() with self.assertRaisesRegex(tarfile.ReadError, "unexpected end of data"): - tar.extract(t, TEMPDIR) + tar.extract(t, TEMPDIR, filter='data') with self.assertRaisesRegex(tarfile.ReadError, "unexpected end of data"): tar.extractfile(t).read() @@ -610,16 +616,16 @@ def test_extract_hardlink(self): # Test hardlink extraction (e.g. bug #857297). 
with tarfile.open(tarname, errorlevel=1, encoding="iso8859-1") as tar: - tar.extract("ustar/regtype", TEMPDIR) + tar.extract("ustar/regtype", TEMPDIR, filter='data') self.addCleanup(os_helper.unlink, os.path.join(TEMPDIR, "ustar/regtype")) - tar.extract("ustar/lnktype", TEMPDIR) + tar.extract("ustar/lnktype", TEMPDIR, filter='data') self.addCleanup(os_helper.unlink, os.path.join(TEMPDIR, "ustar/lnktype")) with open(os.path.join(TEMPDIR, "ustar/lnktype"), "rb") as f: data = f.read() self.assertEqual(sha256sum(data), sha256_regtype) - tar.extract("ustar/symtype", TEMPDIR) + tar.extract("ustar/symtype", TEMPDIR, filter='data') self.addCleanup(os_helper.unlink, os.path.join(TEMPDIR, "ustar/symtype")) with open(os.path.join(TEMPDIR, "ustar/symtype"), "rb") as f: data = f.read() @@ -633,13 +639,14 @@ os.mkdir(DIR) try: directories = [t for t in tar if t.isdir()] - tar.extractall(DIR, directories) + tar.extractall(DIR, directories, filter='fully_trusted') for tarinfo in directories: path = os.path.join(DIR, tarinfo.name) if sys.platform != "win32": # Win32 has no support for fine grained permissions. self.assertEqual(tarinfo.mode & 0o777, - os.stat(path).st_mode & 0o777) + os.stat(path).st_mode & 0o777, + tarinfo.name) def format_mtime(mtime): if isinstance(mtime, float): return "{} ({})".format(mtime, mtime.hex()) @@ -662,7 +669,7 @@ try: with tarfile.open(tarname, encoding="iso8859-1") as tar: tarinfo = tar.getmember(dirtype) - tar.extract(tarinfo, path=DIR) + tar.extract(tarinfo, path=DIR, filter='fully_trusted') extracted = os.path.join(DIR, dirtype) self.assertEqual(os.path.getmtime(extracted), tarinfo.mtime) if sys.platform != "win32": @@ -675,7 +682,7 @@ with os_helper.temp_dir(DIR), \ tarfile.open(tarname, encoding="iso8859-1") as tar: directories = [t for t in tar if t.isdir()] - tar.extractall(DIR, directories) + tar.extractall(DIR, directories, filter='fully_trusted') for tarinfo in directories: path = DIR / tarinfo.name self.assertEqual(os.path.getmtime(path), tarinfo.mtime) @@ -686,7 +693,7 @@ with os_helper.temp_dir(DIR), \ tarfile.open(tarname, encoding="iso8859-1") as tar: tarinfo = tar.getmember(dirtype) - tar.extract(tarinfo, path=DIR) + tar.extract(tarinfo, path=DIR, filter='fully_trusted') extracted = DIR / dirtype self.assertEqual(os.path.getmtime(extracted), tarinfo.mtime) @@ -1042,7 +1049,7 @@ # an all platforms, and after that a test that will work only on # platforms/filesystems that prove to support sparse files. 
def _test_sparse_file(self, name): - self.tar.extract(name, TEMPDIR) + self.tar.extract(name, TEMPDIR, filter='data') filename = os.path.join(TEMPDIR, name) with open(filename, "rb") as fobj: data = fobj.read() @@ -1409,7 +1416,8 @@ with tarfile.open(temparchive, errorlevel=2) as tar: # this should not raise OSError: [Errno 17] File exists try: - tar.extractall(path=tempdir) + tar.extractall(path=tempdir, + filter='fully_trusted') except OSError: self.fail("extractall failed with symlinked files") finally: @@ -2406,7 +2414,12 @@ 'PAX_NUMBER_FIELDS', 'stn', 'nts', 'nti', 'itn', 'calc_chksums', 'copyfileobj', 'filemode', 'EmptyHeaderError', 'TruncatedHeaderError', 'EOFHeaderError', 'InvalidHeaderError', - 'SubsequentHeaderError', 'ExFileObject', 'main'} + 'SubsequentHeaderError', 'ExFileObject', 'main', + "fully_trusted_filter", "data_filter", + "tar_filter", "FilterError", "AbsoluteLinkError", + "OutsideDestinationError", "SpecialFileError", "AbsolutePathError", + "LinkOutsideDestinationError", + } support.check__all__(self, tarfile, not_exported=not_exported) def test_useful_error_message_when_modules_missing(self): @@ -2441,6 +2454,15 @@ for tardata in files: tf.add(tardata, arcname=os.path.basename(tardata)) + def make_evil_tarfile(self, tar_name): + files = [support.findfile('tokenize_tests.txt')] + self.addCleanup(os_helper.unlink, tar_name) + with tarfile.open(tar_name, 'w') as tf: + benign = tarfile.TarInfo('benign') + tf.addfile(benign, fileobj=io.BytesIO(b'')) + evil = tarfile.TarInfo('../evil') + tf.addfile(evil, fileobj=io.BytesIO(b'')) + def test_bad_use(self): rc, out, err = self.tarfilecmd_failure() self.assertEqual(out, b'') @@ -2597,6 +2619,25 @@ finally: os_helper.rmtree(tarextdir) + def test_extract_command_filter(self): + self.make_evil_tarfile(tmpname) + # Make an inner directory, so the member named '../evil' + # is still extracted into `tarextdir` + destdir = os.path.join(tarextdir, 'dest') + os.mkdir(tarextdir) + try: + with os_helper.temp_cwd(destdir): + self.tarfilecmd_failure('-e', tmpname, + '-v', + '--filter', 'data') + out = self.tarfilecmd('-e', tmpname, + '-v', + '--filter', 'fully_trusted', + PYTHONIOENCODING='utf-8') + self.assertIn(b' file is extracted.', out) + finally: + os_helper.rmtree(tarextdir) + def test_extract_command_different_directory(self): self.make_simple_tarfile(tmpname) try: @@ -2680,7 +2721,7 @@ # symbolic or hard links tarfile tries to extract these types of members # as the regular files they point to. 
def _test_link_extraction(self, name): - self.tar.extract(name, TEMPDIR) + self.tar.extract(name, TEMPDIR, filter='fully_trusted') with open(os.path.join(TEMPDIR, name), "rb") as f: data = f.read() self.assertEqual(sha256sum(data), sha256_regtype) @@ -2812,8 +2853,10 @@ mock_chown): with self._setup_test(mock_geteuid) as (tarfl, filename_1, _, filename_2): - tarfl.extract(filename_1, TEMPDIR, numeric_owner=True) - tarfl.extract(filename_2 , TEMPDIR, numeric_owner=True) + tarfl.extract(filename_1, TEMPDIR, numeric_owner=True, + filter='fully_trusted') + tarfl.extract(filename_2 , TEMPDIR, numeric_owner=True, + filter='fully_trusted') # convert to filesystem paths f_filename_1 = os.path.join(TEMPDIR, filename_1) @@ -2831,7 +2874,8 @@ mock_chown): with self._setup_test(mock_geteuid) as (tarfl, filename_1, dirname_1, filename_2): - tarfl.extractall(TEMPDIR, numeric_owner=True) + tarfl.extractall(TEMPDIR, numeric_owner=True, + filter='fully_trusted') # convert to filesystem paths f_filename_1 = os.path.join(TEMPDIR, filename_1) @@ -2856,7 +2900,8 @@ def test_extract_without_numeric_owner(self, mock_geteuid, mock_chmod, mock_chown): with self._setup_test(mock_geteuid) as (tarfl, filename_1, _, _): - tarfl.extract(filename_1, TEMPDIR, numeric_owner=False) + tarfl.extract(filename_1, TEMPDIR, numeric_owner=False, + filter='fully_trusted') # convert to filesystem paths f_filename_1 = os.path.join(TEMPDIR, filename_1) @@ -2870,6 +2915,893 @@ tarfl.extract, filename_1, TEMPDIR, False, True) +class ReplaceTests(ReadTest, unittest.TestCase): + def test_replace_name(self): + member = self.tar.getmember('ustar/regtype') + replaced = member.replace(name='misc/other') + self.assertEqual(replaced.name, 'misc/other') + self.assertEqual(member.name, 'ustar/regtype') + self.assertEqual(self.tar.getmember('ustar/regtype').name, + 'ustar/regtype') + + def test_replace_deep(self): + member = self.tar.getmember('pax/regtype1') + replaced = member.replace() + replaced.pax_headers['gname'] = 'not-bar' + self.assertEqual(member.pax_headers['gname'], 'bar') + self.assertEqual( + self.tar.getmember('pax/regtype1').pax_headers['gname'], 'bar') + + def test_replace_shallow(self): + member = self.tar.getmember('pax/regtype1') + replaced = member.replace(deep=False) + replaced.pax_headers['gname'] = 'not-bar' + self.assertEqual(member.pax_headers['gname'], 'not-bar') + self.assertEqual( + self.tar.getmember('pax/regtype1').pax_headers['gname'], 'not-bar') + + def test_replace_all(self): + member = self.tar.getmember('ustar/regtype') + for attr_name in ('name', 'mtime', 'mode', 'linkname', + 'uid', 'gid', 'uname', 'gname'): + with self.subTest(attr_name=attr_name): + replaced = member.replace(**{attr_name: None}) + self.assertEqual(getattr(replaced, attr_name), None) + self.assertNotEqual(getattr(member, attr_name), None) + + def test_replace_internal(self): + member = self.tar.getmember('ustar/regtype') + with self.assertRaises(TypeError): + member.replace(offset=123456789) + + +class NoneInfoExtractTests(ReadTest): + # These mainly check that all kinds of members are extracted successfully + # if some metadata is None. + # Some of the methods do additional spot checks. + + # We also test that the default filters can deal with None. 
+ + extraction_filter = None + + @classmethod + def setUpClass(cls): + tar = tarfile.open(tarname, mode='r', encoding="iso8859-1") + cls.control_dir = pathlib.Path(TEMPDIR) / "extractall_ctrl" + tar.errorlevel = 0 + tar.extractall(cls.control_dir, filter=cls.extraction_filter) + tar.close() + cls.control_paths = set( + p.relative_to(cls.control_dir) + for p in pathlib.Path(cls.control_dir).glob('**/*')) + + @classmethod + def tearDownClass(cls): + shutil.rmtree(cls.control_dir) + + def check_files_present(self, directory): + got_paths = set( + p.relative_to(directory) + for p in pathlib.Path(directory).glob('**/*')) + self.assertEqual(self.control_paths, got_paths) + + @contextmanager + def extract_with_none(self, *attr_names): + DIR = pathlib.Path(TEMPDIR) / "extractall_none" + self.tar.errorlevel = 0 + for member in self.tar.getmembers(): + for attr_name in attr_names: + setattr(member, attr_name, None) + with os_helper.temp_dir(DIR): + self.tar.extractall(DIR, filter='fully_trusted') + self.check_files_present(DIR) + yield DIR + + def test_extractall_none_mtime(self): + # mtimes of extracted files should be later than 'now' -- the mtime + # of a previously created directory. + now = pathlib.Path(TEMPDIR).stat().st_mtime + with self.extract_with_none('mtime') as DIR: + for path in pathlib.Path(DIR).glob('**/*'): + with self.subTest(path=path): + try: + mtime = path.stat().st_mtime + except OSError: + # Some systems can't stat symlinks, ignore those + if not path.is_symlink(): + raise + else: + self.assertGreaterEqual(path.stat().st_mtime, now) + + def test_extractall_none_mode(self): + # modes of directories and regular files should match the mode + # of a "normally" created directory or regular file + dir_mode = pathlib.Path(TEMPDIR).stat().st_mode + regular_file = pathlib.Path(TEMPDIR) / 'regular_file' + regular_file.write_text('') + regular_file_mode = regular_file.stat().st_mode + with self.extract_with_none('mode') as DIR: + for path in pathlib.Path(DIR).glob('**/*'): + with self.subTest(path=path): + if path.is_dir(): + self.assertEqual(path.stat().st_mode, dir_mode) + elif path.is_file(): + self.assertEqual(path.stat().st_mode, + regular_file_mode) + + def test_extractall_none_uid(self): + with self.extract_with_none('uid'): + pass + + def test_extractall_none_gid(self): + with self.extract_with_none('gid'): + pass + + def test_extractall_none_uname(self): + with self.extract_with_none('uname'): + pass + + def test_extractall_none_gname(self): + with self.extract_with_none('gname'): + pass + + def test_extractall_none_ownership(self): + with self.extract_with_none('uid', 'gid', 'uname', 'gname'): + pass + +class NoneInfoExtractTests_Data(NoneInfoExtractTests, unittest.TestCase): + extraction_filter = 'data' + +class NoneInfoExtractTests_FullyTrusted(NoneInfoExtractTests, + unittest.TestCase): + extraction_filter = 'fully_trusted' + +class NoneInfoExtractTests_Tar(NoneInfoExtractTests, unittest.TestCase): + extraction_filter = 'tar' + +class NoneInfoExtractTests_Default(NoneInfoExtractTests, + unittest.TestCase): + extraction_filter = None + +class NoneInfoTests_Misc(unittest.TestCase): + def test_add(self): + # When addfile() encounters None metadata, it raises a ValueError + bio = io.BytesIO() + for tarformat in (tarfile.USTAR_FORMAT, tarfile.GNU_FORMAT, + tarfile.PAX_FORMAT): + with self.subTest(tarformat=tarformat): + tar = tarfile.open(fileobj=bio, mode='w', format=tarformat) + tarinfo = tar.gettarinfo(tarname) + try: + tar.addfile(tarinfo) + except Exception: + if tarformat 
== tarfile.USTAR_FORMAT: + # In the old, limited format, adding might fail for + # reasons like the UID being too large + pass + else: + raise + else: + for attr_name in ('mtime', 'mode', 'uid', 'gid', + 'uname', 'gname'): + with self.subTest(attr_name=attr_name): + replaced = tarinfo.replace(**{attr_name: None}) + with self.assertRaisesRegex(ValueError, + f"{attr_name}"): + tar.addfile(replaced) + + def test_list(self): + # Change some metadata to None, then compare list() output + # word-for-word. We want list() to not raise, and to only change + # printout for the affected piece of metadata. + # (n.b.: some contents of the test archive are hardcoded.) + for attr_names in ({'mtime'}, {'mode'}, {'uid'}, {'gid'}, + {'uname'}, {'gname'}, + {'uid', 'uname'}, {'gid', 'gname'}): + with (self.subTest(attr_names=attr_names), + tarfile.open(tarname, encoding="iso8859-1") as tar): + tio_prev = io.TextIOWrapper(io.BytesIO(), 'ascii', newline='\n') + with support.swap_attr(sys, 'stdout', tio_prev): + tar.list() + for member in tar.getmembers(): + for attr_name in attr_names: + setattr(member, attr_name, None) + tio_new = io.TextIOWrapper(io.BytesIO(), 'ascii', newline='\n') + with support.swap_attr(sys, 'stdout', tio_new): + tar.list() + for expected, got in zip(tio_prev.detach().getvalue().split(), + tio_new.detach().getvalue().split()): + if attr_names == {'mtime'} and re.match(rb'2003-01-\d\d', expected): + self.assertEqual(got, b'????-??-??') + elif attr_names == {'mtime'} and re.match(rb'\d\d:\d\d:\d\d', expected): + self.assertEqual(got, b'??:??:??') + elif attr_names == {'mode'} and re.match( + rb'.([r-][w-][x-]){3}', expected): + self.assertEqual(got, b'??????????') + elif attr_names == {'uname'} and expected.startswith( + (b'tarfile/', b'lars/', b'foo/')): + exp_user, exp_group = expected.split(b'/') + got_user, got_group = got.split(b'/') + self.assertEqual(got_group, exp_group) + self.assertRegex(got_user, b'[0-9]+') + elif attr_names == {'gname'} and expected.endswith( + (b'/tarfile', b'/users', b'/bar')): + exp_user, exp_group = expected.split(b'/') + got_user, got_group = got.split(b'/') + self.assertEqual(got_user, exp_user) + self.assertRegex(got_group, b'[0-9]+') + elif attr_names == {'uid'} and expected.startswith( + (b'1000/')): + exp_user, exp_group = expected.split(b'/') + got_user, got_group = got.split(b'/') + self.assertEqual(got_group, exp_group) + self.assertEqual(got_user, b'None') + elif attr_names == {'gid'} and expected.endswith((b'/100')): + exp_user, exp_group = expected.split(b'/') + got_user, got_group = got.split(b'/') + self.assertEqual(got_user, exp_user) + self.assertEqual(got_group, b'None') + elif attr_names == {'uid', 'uname'} and expected.startswith( + (b'tarfile/', b'lars/', b'foo/', b'1000/')): + exp_user, exp_group = expected.split(b'/') + got_user, got_group = got.split(b'/') + self.assertEqual(got_group, exp_group) + self.assertEqual(got_user, b'None') + elif attr_names == {'gname', 'gid'} and expected.endswith( + (b'/tarfile', b'/users', b'/bar', b'/100')): + exp_user, exp_group = expected.split(b'/') + got_user, got_group = got.split(b'/') + self.assertEqual(got_user, exp_user) + self.assertEqual(got_group, b'None') + else: + # In other cases the output should be the same + self.assertEqual(expected, got) + +def _filemode_to_int(mode): + """Inverse of `stat.filemode` (for permission bits) + + Using mode strings rather than numbers makes the later tests more readable. 
+ """ + str_mode = mode[1:] + result = ( + {'r': stat.S_IRUSR, '-': 0}[str_mode[0]] + | {'w': stat.S_IWUSR, '-': 0}[str_mode[1]] + | {'x': stat.S_IXUSR, '-': 0, + 's': stat.S_IXUSR | stat.S_ISUID, + 'S': stat.S_ISUID}[str_mode[2]] + | {'r': stat.S_IRGRP, '-': 0}[str_mode[3]] + | {'w': stat.S_IWGRP, '-': 0}[str_mode[4]] + | {'x': stat.S_IXGRP, '-': 0, + 's': stat.S_IXGRP | stat.S_ISGID, + 'S': stat.S_ISGID}[str_mode[5]] + | {'r': stat.S_IROTH, '-': 0}[str_mode[6]] + | {'w': stat.S_IWOTH, '-': 0}[str_mode[7]] + | {'x': stat.S_IXOTH, '-': 0, + 't': stat.S_IXOTH | stat.S_ISVTX, + 'T': stat.S_ISVTX}[str_mode[8]] + ) + # check we did this right + assert stat.filemode(result)[1:] == mode[1:] + + return result + +class ArchiveMaker: + """Helper to create a tar file with specific contents + + Usage: + + with ArchiveMaker() as t: + t.add('filename', ...) + + with t.open() as tar: + ... # `tar` is now a TarFile with 'filename' in it! + """ + def __init__(self): + self.bio = io.BytesIO() + + def __enter__(self): + self.tar_w = tarfile.TarFile(mode='w', fileobj=self.bio) + return self + + def __exit__(self, *exc): + self.tar_w.close() + self.contents = self.bio.getvalue() + self.bio = None + + def add(self, name, *, type=None, symlink_to=None, hardlink_to=None, + mode=None, **kwargs): + """Add a member to the test archive. Call within `with`.""" + name = str(name) + tarinfo = tarfile.TarInfo(name).replace(**kwargs) + if mode: + tarinfo.mode = _filemode_to_int(mode) + if symlink_to is not None: + type = tarfile.SYMTYPE + tarinfo.linkname = str(symlink_to) + if hardlink_to is not None: + type = tarfile.LNKTYPE + tarinfo.linkname = str(hardlink_to) + if name.endswith('/') and type is None: + type = tarfile.DIRTYPE + if type is not None: + tarinfo.type = type + if tarinfo.isreg(): + fileobj = io.BytesIO(bytes(tarinfo.size)) + else: + fileobj = None + self.tar_w.addfile(tarinfo, fileobj) + + def open(self, **kwargs): + """Open the resulting archive as TarFile. Call after `with`.""" + bio = io.BytesIO(self.contents) + return tarfile.open(fileobj=bio, **kwargs) + + +class TestExtractionFilters(unittest.TestCase): + + # A temporary directory for the extraction results. + # All files that "escape" the destination path should still end + # up in this directory. + outerdir = pathlib.Path(TEMPDIR) / 'outerdir' + + # The destination for the extraction, within `outerdir` + destdir = outerdir / 'dest' + + @contextmanager + def check_context(self, tar, filter): + """Extracts `tar` to `self.destdir` and allows checking the result + + If an error occurs, it must be checked using `expect_exception` + + Otherwise, all resulting files must be checked using `expect_file`, + except the destination directory itself and parent directories of + other files. + When checking directories, do so before their contents. + """ + with os_helper.temp_dir(self.outerdir): + try: + tar.extractall(self.destdir, filter=filter) + except Exception as exc: + self.raised_exception = exc + self.expected_paths = set() + else: + self.raised_exception = None + self.expected_paths = set(self.outerdir.glob('**/*')) + self.expected_paths.discard(self.destdir) + try: + yield + finally: + tar.close() + if self.raised_exception: + raise self.raised_exception + self.assertEqual(self.expected_paths, set()) + + def expect_file(self, name, type=None, symlink_to=None, mode=None): + """Check a single file. 
See check_context.""" + if self.raised_exception: + raise self.raised_exception + # use normpath() rather than resolve() so we don't follow symlinks + path = pathlib.Path(os.path.normpath(self.destdir / name)) + self.assertIn(path, self.expected_paths) + self.expected_paths.remove(path) + + # When checking mode, ignore Windows (which can only set user read and + # user write bits). Newer versions of Python use `os_helper.can_chmod()` + # instead of hardcoding Windows. + if mode is not None and sys.platform != 'win32': + got = stat.filemode(stat.S_IMODE(path.stat().st_mode)) + self.assertEqual(got, mode) + + if type is None and isinstance(name, str) and name.endswith('/'): + type = tarfile.DIRTYPE + if symlink_to is not None: + got = (self.destdir / name).readlink() + expected = pathlib.Path(symlink_to) + # The symlink might be the same (textually) as what we expect, + # but some systems change the link to an equivalent path, so + # we fall back to samefile(). + if expected != got: + self.assertTrue(got.samefile(expected)) + elif type == tarfile.REGTYPE or type is None: + self.assertTrue(path.is_file()) + elif type == tarfile.DIRTYPE: + self.assertTrue(path.is_dir()) + elif type == tarfile.FIFOTYPE: + self.assertTrue(path.is_fifo()) + else: + raise NotImplementedError(type) + for parent in path.parents: + self.expected_paths.discard(parent) + + def expect_exception(self, exc_type, message_re='.'): + with self.assertRaisesRegex(exc_type, message_re): + if self.raised_exception is not None: + raise self.raised_exception + self.raised_exception = None + + def test_benign_file(self): + with ArchiveMaker() as arc: + arc.add('benign.txt') + for filter in 'fully_trusted', 'tar', 'data': + with self.check_context(arc.open(), filter): + self.expect_file('benign.txt') + + def test_absolute(self): + # Test handling a member with an absolute path + # Inspired by 'absolute1' in https://github.com/jwilk/traversal-archives + with ArchiveMaker() as arc: + arc.add(self.outerdir / 'escaped.evil') + + with self.check_context(arc.open(), 'fully_trusted'): + self.expect_file('../escaped.evil') + + for filter in 'tar', 'data': + with self.check_context(arc.open(), filter): + if str(self.outerdir).startswith('/'): + # We strip leading slashes, as e.g. GNU tar does + # (without --absolute-filenames). + outerdir_stripped = str(self.outerdir).lstrip('/') + self.expect_file(f'{outerdir_stripped}/escaped.evil') + else: + # On this system, absolute paths don't have leading + # slashes. + # So, there's nothing to strip. We refuse to unpack + # to an absolute path, nonetheless. + self.expect_exception( + tarfile.AbsolutePathError, + """['"].*escaped.evil['"] has an absolute path""") + + def test_parent_symlink(self): + # Test interplaying symlinks + # Inspired by 'dirsymlink2a' in jwilk/traversal-archives + with ArchiveMaker() as arc: + arc.add('current', symlink_to='.') + arc.add('parent', symlink_to='current/..') + arc.add('parent/evil') + + if os_helper.can_symlink(): + with self.check_context(arc.open(), 'fully_trusted'): + if self.raised_exception is not None: + # Windows will refuse to create a file that's a symlink to itself + # (and tarfile doesn't swallow that exception) + self.expect_exception(FileExistsError) + # The other cases will fail with this error too. + # Skip the rest of this test. 
+ return + else: + self.expect_file('current', symlink_to='.') + self.expect_file('parent', symlink_to='current/..') + self.expect_file('../evil') + + with self.check_context(arc.open(), 'tar'): + self.expect_exception( + tarfile.OutsideDestinationError, + """'parent/evil' would be extracted to ['"].*evil['"], """ + + "which is outside the destination") + + with self.check_context(arc.open(), 'data'): + self.expect_exception( + tarfile.LinkOutsideDestinationError, + """'parent' would link to ['"].*outerdir['"], """ + + "which is outside the destination") + + else: + # No symlink support. The symlinks are ignored. + with self.check_context(arc.open(), 'fully_trusted'): + self.expect_file('parent/evil') + with self.check_context(arc.open(), 'tar'): + self.expect_file('parent/evil') + with self.check_context(arc.open(), 'data'): + self.expect_file('parent/evil') + + def test_parent_symlink2(self): + # Test interplaying symlinks + # Inspired by 'dirsymlink2b' in jwilk/traversal-archives + with ArchiveMaker() as arc: + arc.add('current', symlink_to='.') + arc.add('current/parent', symlink_to='..') + arc.add('parent/evil') + + with self.check_context(arc.open(), 'fully_trusted'): + if os_helper.can_symlink(): + self.expect_file('current', symlink_to='.') + self.expect_file('parent', symlink_to='..') + self.expect_file('../evil') + else: + self.expect_file('current/') + self.expect_file('parent/evil') + + with self.check_context(arc.open(), 'tar'): + if os_helper.can_symlink(): + self.expect_exception( + tarfile.OutsideDestinationError, + "'parent/evil' would be extracted to " + + """['"].*evil['"], which is outside """ + + "the destination") + else: + self.expect_file('current/') + self.expect_file('parent/evil') + + with self.check_context(arc.open(), 'data'): + self.expect_exception( + tarfile.LinkOutsideDestinationError, + """'current/parent' would link to ['"].*['"], """ + + "which is outside the destination") + + def test_absolute_symlink(self): + # Test symlink to an absolute path + # Inspired by 'dirsymlink' in jwilk/traversal-archives + with ArchiveMaker() as arc: + arc.add('parent', symlink_to=self.outerdir) + arc.add('parent/evil') + + with self.check_context(arc.open(), 'fully_trusted'): + if os_helper.can_symlink(): + self.expect_file('parent', symlink_to=self.outerdir) + self.expect_file('../evil') + else: + self.expect_file('parent/evil') + + with self.check_context(arc.open(), 'tar'): + if os_helper.can_symlink(): + self.expect_exception( + tarfile.OutsideDestinationError, + "'parent/evil' would be extracted to " + + """['"].*evil['"], which is outside """ + + "the destination") + else: + self.expect_file('parent/evil') + + with self.check_context(arc.open(), 'data'): + self.expect_exception( + tarfile.AbsoluteLinkError, + "'parent' is a symlink to an absolute path") + + def test_sly_relative0(self): + # Inspired by 'relative0' in jwilk/traversal-archives + with ArchiveMaker() as arc: + arc.add('../moo', symlink_to='..//tmp/moo') + + try: + with self.check_context(arc.open(), filter='fully_trusted'): + if os_helper.can_symlink(): + if isinstance(self.raised_exception, FileExistsError): + # XXX TarFile happens to fail creating a parent + # directory. + # This might be a bug, but fixing it would hurt + # security. + # Note that e.g. GNU `tar` rejects '..' components, + # so you could argue this is an invalid archive and we + # just raise an bad type of exception. 
+ self.expect_exception(FileExistsError) + else: + self.expect_file('../moo', symlink_to='..//tmp/moo') + else: + # The symlink can't be extracted and is ignored + pass + except FileExistsError: + pass + + for filter in 'tar', 'data': + with self.check_context(arc.open(), filter): + self.expect_exception( + tarfile.OutsideDestinationError, + "'../moo' would be extracted to " + + "'.*moo', which is outside " + + "the destination") + + def test_sly_relative2(self): + # Inspired by 'relative2' in jwilk/traversal-archives + with ArchiveMaker() as arc: + arc.add('tmp/') + arc.add('tmp/../../moo', symlink_to='tmp/../..//tmp/moo') + + with self.check_context(arc.open(), 'fully_trusted'): + self.expect_file('tmp', type=tarfile.DIRTYPE) + if os_helper.can_symlink(): + self.expect_file('../moo', symlink_to='tmp/../../tmp/moo') + + for filter in 'tar', 'data': + with self.check_context(arc.open(), filter): + self.expect_exception( + tarfile.OutsideDestinationError, + "'tmp/../../moo' would be extracted to " + + """['"].*moo['"], which is outside the """ + + "destination") + + def test_modes(self): + # Test how file modes are extracted + # (Note that the modes are ignored on platforms without working chmod) + with ArchiveMaker() as arc: + arc.add('all_bits', mode='?rwsrwsrwt') + arc.add('perm_bits', mode='?rwxrwxrwx') + arc.add('exec_group_other', mode='?rw-rwxrwx') + arc.add('read_group_only', mode='?---r-----') + arc.add('no_bits', mode='?---------') + arc.add('dir/', mode='?---rwsrwt') + + # On some systems, setting the sticky bit is a no-op. + # Check if that's the case. + tmp_filename = os.path.join(TEMPDIR, "tmp.file") + with open(tmp_filename, 'w'): + pass + os.chmod(tmp_filename, os.stat(tmp_filename).st_mode | stat.S_ISVTX) + have_sticky_files = (os.stat(tmp_filename).st_mode & stat.S_ISVTX) + os.unlink(tmp_filename) + + os.mkdir(tmp_filename) + os.chmod(tmp_filename, os.stat(tmp_filename).st_mode | stat.S_ISVTX) + have_sticky_dirs = (os.stat(tmp_filename).st_mode & stat.S_ISVTX) + os.rmdir(tmp_filename) + + with self.check_context(arc.open(), 'fully_trusted'): + if have_sticky_files: + self.expect_file('all_bits', mode='?rwsrwsrwt') + else: + self.expect_file('all_bits', mode='?rwsrwsrwx') + self.expect_file('perm_bits', mode='?rwxrwxrwx') + self.expect_file('exec_group_other', mode='?rw-rwxrwx') + self.expect_file('read_group_only', mode='?---r-----') + self.expect_file('no_bits', mode='?---------') + if have_sticky_dirs: + self.expect_file('dir/', mode='?---rwsrwt') + else: + self.expect_file('dir/', mode='?---rwsrwx') + + with self.check_context(arc.open(), 'tar'): + self.expect_file('all_bits', mode='?rwxr-xr-x') + self.expect_file('perm_bits', mode='?rwxr-xr-x') + self.expect_file('exec_group_other', mode='?rw-r-xr-x') + self.expect_file('read_group_only', mode='?---r-----') + self.expect_file('no_bits', mode='?---------') + self.expect_file('dir/', mode='?---r-xr-x') + + with self.check_context(arc.open(), 'data'): + normal_dir_mode = stat.filemode(stat.S_IMODE( + self.outerdir.stat().st_mode)) + self.expect_file('all_bits', mode='?rwxr-xr-x') + self.expect_file('perm_bits', mode='?rwxr-xr-x') + self.expect_file('exec_group_other', mode='?rw-r--r--') + self.expect_file('read_group_only', mode='?rw-r-----') + self.expect_file('no_bits', mode='?rw-------') + self.expect_file('dir/', mode=normal_dir_mode) + + def test_pipe(self): + # Test handling of a special file + with ArchiveMaker() as arc: + arc.add('foo', type=tarfile.FIFOTYPE) + + for filter in 'fully_trusted', 'tar': + with 
self.check_context(arc.open(), filter): + if hasattr(os, 'mkfifo'): + self.expect_file('foo', type=tarfile.FIFOTYPE) + else: + # The pipe can't be extracted and is skipped. + pass + + with self.check_context(arc.open(), 'data'): + self.expect_exception( + tarfile.SpecialFileError, + "'foo' is a special file") + + def test_special_files(self): + # Creating device files is tricky. Instead of attempting that let's + # only check the filter result. + for special_type in tarfile.FIFOTYPE, tarfile.CHRTYPE, tarfile.BLKTYPE: + tarinfo = tarfile.TarInfo('foo') + tarinfo.type = special_type + trusted = tarfile.fully_trusted_filter(tarinfo, '') + self.assertIs(trusted, tarinfo) + tar = tarfile.tar_filter(tarinfo, '') + self.assertEqual(tar.type, special_type) + with self.assertRaises(tarfile.SpecialFileError) as cm: + tarfile.data_filter(tarinfo, '') + self.assertIsInstance(cm.exception.tarinfo, tarfile.TarInfo) + self.assertEqual(cm.exception.tarinfo.name, 'foo') + + def test_fully_trusted_filter(self): + # The 'fully_trusted' filter returns the original TarInfo objects. + with tarfile.TarFile.open(tarname) as tar: + for tarinfo in tar.getmembers(): + filtered = tarfile.fully_trusted_filter(tarinfo, '') + self.assertIs(filtered, tarinfo) + + def test_tar_filter(self): + # The 'tar' filter returns TarInfo objects with the same name/type. + # (It can also fail for particularly "evil" input, but we don't have + # that in the test archive.) + with tarfile.TarFile.open(tarname) as tar: + for tarinfo in tar.getmembers(): + filtered = tarfile.tar_filter(tarinfo, '') + self.assertIs(filtered.name, tarinfo.name) + self.assertIs(filtered.type, tarinfo.type) + + def test_data_filter(self): + # The 'data' filter either raises, or returns TarInfo with the same + # name/type. 
+ with tarfile.TarFile.open(tarname) as tar: + for tarinfo in tar.getmembers(): + try: + filtered = tarfile.data_filter(tarinfo, '') + except tarfile.FilterError: + continue + self.assertIs(filtered.name, tarinfo.name) + self.assertIs(filtered.type, tarinfo.type) + + def test_default_filter_warns_not(self): + """Ensure the default filter does not warn (like in 3.12)""" + with ArchiveMaker() as arc: + arc.add('foo') + with warnings_helper.check_no_warnings(self): + with self.check_context(arc.open(), None): + self.expect_file('foo') + + def test_change_default_filter_on_instance(self): + tar = tarfile.TarFile(tarname, 'r') + def strict_filter(tarinfo, path): + if tarinfo.name == 'ustar/regtype': + return tarinfo + else: + return None + tar.extraction_filter = strict_filter + with self.check_context(tar, None): + self.expect_file('ustar/regtype') + + def test_change_default_filter_on_class(self): + def strict_filter(tarinfo, path): + if tarinfo.name == 'ustar/regtype': + return tarinfo + else: + return None + tar = tarfile.TarFile(tarname, 'r') + with support.swap_attr(tarfile.TarFile, 'extraction_filter', + staticmethod(strict_filter)): + with self.check_context(tar, None): + self.expect_file('ustar/regtype') + + def test_change_default_filter_on_subclass(self): + class TarSubclass(tarfile.TarFile): + def extraction_filter(self, tarinfo, path): + if tarinfo.name == 'ustar/regtype': + return tarinfo + else: + return None + + tar = TarSubclass(tarname, 'r') + with self.check_context(tar, None): + self.expect_file('ustar/regtype') + + def test_change_default_filter_to_string(self): + tar = tarfile.TarFile(tarname, 'r') + tar.extraction_filter = 'data' + with self.check_context(tar, None): + self.expect_exception(TypeError) + + def test_custom_filter(self): + def custom_filter(tarinfo, path): + self.assertIs(path, self.destdir) + if tarinfo.name == 'move_this': + return tarinfo.replace(name='moved') + if tarinfo.name == 'ignore_this': + return None + return tarinfo + + with ArchiveMaker() as arc: + arc.add('move_this') + arc.add('ignore_this') + arc.add('keep') + with self.check_context(arc.open(), custom_filter): + self.expect_file('moved') + self.expect_file('keep') + + def test_bad_filter_name(self): + with ArchiveMaker() as arc: + arc.add('foo') + with self.check_context(arc.open(), 'bad filter name'): + self.expect_exception(ValueError) + + def test_stateful_filter(self): + # Stateful filters should be possible. + # (This doesn't really test tarfile. Rather, it demonstrates + # that third parties can implement a stateful filter.) 
+ class StatefulFilter: + def __enter__(self): + self.num_files_processed = 0 + return self + + def __call__(self, tarinfo, path): + try: + tarinfo = tarfile.data_filter(tarinfo, path) + except tarfile.FilterError: + return None + self.num_files_processed += 1 + return tarinfo + + def __exit__(self, *exc_info): + self.done = True + + with ArchiveMaker() as arc: + arc.add('good') + arc.add('bad', symlink_to='/') + arc.add('good') + with StatefulFilter() as custom_filter: + with self.check_context(arc.open(), custom_filter): + self.expect_file('good') + self.assertEqual(custom_filter.num_files_processed, 2) + self.assertEqual(custom_filter.done, True) + + def test_errorlevel(self): + def extracterror_filter(tarinfo, path): + raise tarfile.ExtractError('failed with ExtractError') + def filtererror_filter(tarinfo, path): + raise tarfile.FilterError('failed with FilterError') + def oserror_filter(tarinfo, path): + raise OSError('failed with OSError') + def tarerror_filter(tarinfo, path): + raise tarfile.TarError('failed with base TarError') + def valueerror_filter(tarinfo, path): + raise ValueError('failed with ValueError') + + with ArchiveMaker() as arc: + arc.add('file') + + # If errorlevel is 0, errors affected by errorlevel are ignored + + with self.check_context(arc.open(errorlevel=0), extracterror_filter): + self.expect_file('file') + + with self.check_context(arc.open(errorlevel=0), filtererror_filter): + self.expect_file('file') + + with self.check_context(arc.open(errorlevel=0), oserror_filter): + self.expect_file('file') + + with self.check_context(arc.open(errorlevel=0), tarerror_filter): + self.expect_exception(tarfile.TarError) + + with self.check_context(arc.open(errorlevel=0), valueerror_filter): + self.expect_exception(ValueError) + + # If 1, all fatal errors are raised + + with self.check_context(arc.open(errorlevel=1), extracterror_filter): + self.expect_file('file') + + with self.check_context(arc.open(errorlevel=1), filtererror_filter): + self.expect_exception(tarfile.FilterError) + + with self.check_context(arc.open(errorlevel=1), oserror_filter): + self.expect_exception(OSError) + + with self.check_context(arc.open(errorlevel=1), tarerror_filter): + self.expect_exception(tarfile.TarError) + + with self.check_context(arc.open(errorlevel=1), valueerror_filter): + self.expect_exception(ValueError) + + # If 2, all non-fatal errors are raised as well. + + with self.check_context(arc.open(errorlevel=2), extracterror_filter): + self.expect_exception(tarfile.ExtractError) + + with self.check_context(arc.open(errorlevel=2), filtererror_filter): + self.expect_exception(tarfile.FilterError) + + with self.check_context(arc.open(errorlevel=2), oserror_filter): + self.expect_exception(OSError) + + with self.check_context(arc.open(errorlevel=2), tarerror_filter): + self.expect_exception(tarfile.TarError) + + with self.check_context(arc.open(errorlevel=2), valueerror_filter): + self.expect_exception(ValueError) + + # We only handle ExtractionError, FilterError & OSError specially. 
+ + with self.check_context(arc.open(errorlevel='boo!'), filtererror_filter): + self.expect_exception(TypeError) # errorlevel is not int + + def setUpModule(): os_helper.unlink(TEMPDIR) os.makedirs(TEMPDIR) diff -Nru python3.10-3.10.11/Lib/test/test_urlparse.py python3.10-3.10.12/Lib/test/test_urlparse.py --- python3.10-3.10.11/Lib/test/test_urlparse.py 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Lib/test/test_urlparse.py 2023-06-06 22:30:33.000000000 +0000 @@ -649,6 +649,65 @@ self.assertEqual(p.scheme, "http") self.assertEqual(p.geturl(), "http://www.python.org/javascript:alert('msg')/?query=something#fragment") + def test_urlsplit_strip_url(self): + noise = bytes(range(0, 0x20 + 1)) + base_url = "http://User:Pass@www.python.org:080/doc/?query=yes#frag" + + url = noise.decode("utf-8") + base_url + p = urllib.parse.urlsplit(url) + self.assertEqual(p.scheme, "http") + self.assertEqual(p.netloc, "User:Pass@www.python.org:080") + self.assertEqual(p.path, "/doc/") + self.assertEqual(p.query, "query=yes") + self.assertEqual(p.fragment, "frag") + self.assertEqual(p.username, "User") + self.assertEqual(p.password, "Pass") + self.assertEqual(p.hostname, "www.python.org") + self.assertEqual(p.port, 80) + self.assertEqual(p.geturl(), base_url) + + url = noise + base_url.encode("utf-8") + p = urllib.parse.urlsplit(url) + self.assertEqual(p.scheme, b"http") + self.assertEqual(p.netloc, b"User:Pass@www.python.org:080") + self.assertEqual(p.path, b"/doc/") + self.assertEqual(p.query, b"query=yes") + self.assertEqual(p.fragment, b"frag") + self.assertEqual(p.username, b"User") + self.assertEqual(p.password, b"Pass") + self.assertEqual(p.hostname, b"www.python.org") + self.assertEqual(p.port, 80) + self.assertEqual(p.geturl(), base_url.encode("utf-8")) + + # Test that trailing space is preserved as some applications rely on + # this within query strings. + query_spaces_url = "https://www.python.org:88/doc/?query= " + p = urllib.parse.urlsplit(noise.decode("utf-8") + query_spaces_url) + self.assertEqual(p.scheme, "https") + self.assertEqual(p.netloc, "www.python.org:88") + self.assertEqual(p.path, "/doc/") + self.assertEqual(p.query, "query= ") + self.assertEqual(p.port, 88) + self.assertEqual(p.geturl(), query_spaces_url) + + p = urllib.parse.urlsplit("www.pypi.org ") + # That "hostname" gets considered a "path" due to the + # trailing space and our existing logic... YUCK... + # and re-assembles via geturl aka unurlsplit into the original. + # django.core.validators.URLValidator (at least through v3.2) relies on + # this, for better or worse, to catch it in a ValidationError via its + # regular expressions. + # Here we test the basic round trip concept of such a trailing space. 
+ self.assertEqual(urllib.parse.urlunsplit(p), "www.pypi.org ") + + # with scheme as cache-key + url = "//www.python.org/" + scheme = noise.decode("utf-8") + "https" + noise.decode("utf-8") + for _ in range(2): + p = urllib.parse.urlsplit(url, scheme=scheme) + self.assertEqual(p.scheme, "https") + self.assertEqual(p.geturl(), "https://www.python.org/") + def test_attributes_bad_port(self): """Check handling of invalid ports.""" for bytes in (False, True): @@ -656,7 +715,7 @@ for port in ("foo", "1.5", "-1", "0x10", "-0", "1_1", " 1", "1 ", "६"): with self.subTest(bytes=bytes, parse=parse, port=port): netloc = "www.example.net:" + port - url = "http://" + netloc + url = "http://" + netloc + "/" if bytes: if netloc.isascii() and port.isascii(): netloc = netloc.encode("ascii") diff -Nru python3.10-3.10.11/Lib/test/test_uu.py python3.10-3.10.12/Lib/test/test_uu.py --- python3.10-3.10.11/Lib/test/test_uu.py 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Lib/test/test_uu.py 2023-06-06 22:30:33.000000000 +0000 @@ -145,6 +145,34 @@ uu.encode(inp, out, filename) self.assertIn(safefilename, out.getvalue()) + def test_no_directory_traversal(self): + relative_bad = b"""\ +begin 644 ../../../../../../../../tmp/test1 +$86)C"@`` +` +end +""" + with self.assertRaisesRegex(uu.Error, 'directory'): + uu.decode(io.BytesIO(relative_bad)) + if os.altsep: + relative_bad_bs = relative_bad.replace(b'/', b'\\') + with self.assertRaisesRegex(uu.Error, 'directory'): + uu.decode(io.BytesIO(relative_bad_bs)) + + absolute_bad = b"""\ +begin 644 /tmp/test2 +$86)C"@`` +` +end +""" + with self.assertRaisesRegex(uu.Error, 'directory'): + uu.decode(io.BytesIO(absolute_bad)) + if os.altsep: + absolute_bad_bs = absolute_bad.replace(b'/', b'\\') + with self.assertRaisesRegex(uu.Error, 'directory'): + uu.decode(io.BytesIO(absolute_bad_bs)) + + class UUStdIOTest(unittest.TestCase): def setUp(self): diff -Nru python3.10-3.10.11/Lib/traceback.py python3.10-3.10.12/Lib/traceback.py --- python3.10-3.10.11/Lib/traceback.py 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Lib/traceback.py 2023-06-06 22:30:33.000000000 +0000 @@ -481,8 +481,8 @@ occurred. - :attr:`offset` For syntax errors - the offset into the text where the error occurred. - - :attr:`end_offset` For syntax errors - the offset into the text where the - error occurred. Can be `None` if not present. + - :attr:`end_offset` For syntax errors - the end offset into the text where + the error occurred. Can be `None` if not present. - :attr:`msg` For syntax errors - the compiler error message. 
""" diff -Nru python3.10-3.10.11/Lib/trace.py python3.10-3.10.12/Lib/trace.py --- python3.10-3.10.11/Lib/trace.py 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Lib/trace.py 2023-06-06 22:30:33.000000000 +0000 @@ -49,6 +49,7 @@ """ __all__ = ['Trace', 'CoverageResults'] +import io import linecache import os import sys @@ -716,7 +717,7 @@ sys.argv = [opts.progname, *opts.arguments] sys.path[0] = os.path.dirname(opts.progname) - with open(opts.progname, 'rb') as fp: + with io.open_code(opts.progname) as fp: code = compile(fp.read(), opts.progname, 'exec') # try to emulate __main__ namespace as much as possible globs = { diff -Nru python3.10-3.10.11/Lib/urllib/parse.py python3.10-3.10.12/Lib/urllib/parse.py --- python3.10-3.10.11/Lib/urllib/parse.py 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Lib/urllib/parse.py 2023-06-06 22:30:33.000000000 +0000 @@ -25,6 +25,10 @@ scenarios for parsing, and for backward compatibility purposes, some parsing quirks from older RFCs are retained. The testcases in test_urlparse.py provides a good indicator of parsing behavior. + +The WHATWG URL Parser spec should also be considered. We are not compliant with +it either due to existing user code API behavior expectations (Hyrum's Law). +It serves as a useful guide when making changes. """ import re @@ -78,6 +82,10 @@ '0123456789' '+-.') +# Leading and trailing C0 control and space to be stripped per WHATWG spec. +# == "".join([chr(i) for i in range(0, 0x20 + 1)]) +_WHATWG_C0_CONTROL_OR_SPACE = '\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f ' + # Unsafe bytes to be removed per WHATWG spec _UNSAFE_URL_BYTES_TO_REMOVE = ['\t', '\r', '\n'] @@ -455,6 +463,10 @@ """ url, scheme, _coerce_result = _coerce_args(url, scheme) + # Only lstrip url as some applications rely on preserving trailing space. + # (https://url.spec.whatwg.org/#concept-basic-url-parser would strip both) + url = url.lstrip(_WHATWG_C0_CONTROL_OR_SPACE) + scheme = scheme.strip(_WHATWG_C0_CONTROL_OR_SPACE) for b in _UNSAFE_URL_BYTES_TO_REMOVE: url = url.replace(b, "") diff -Nru python3.10-3.10.11/Lib/uu.py python3.10-3.10.12/Lib/uu.py --- python3.10-3.10.11/Lib/uu.py 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Lib/uu.py 2023-06-06 22:30:33.000000000 +0000 @@ -130,7 +130,14 @@ # If the filename isn't ASCII, what's up with that?!? 
out_file = hdrfields[2].rstrip(b' \t\r\n\f').decode("ascii") if os.path.exists(out_file): - raise Error('Cannot overwrite existing file: %s' % out_file) + raise Error(f'Cannot overwrite existing file: {out_file}') + if (out_file.startswith(os.sep) or + f'..{os.sep}' in out_file or ( + os.altsep and + (out_file.startswith(os.altsep) or + f'..{os.altsep}' in out_file)) + ): + raise Error(f'Refusing to write to {out_file} due to directory traversal') if mode is None: mode = int(hdrfields[1], 8) # diff -Nru python3.10-3.10.11/Mac/BuildScript/build-installer.py python3.10-3.10.12/Mac/BuildScript/build-installer.py --- python3.10-3.10.11/Mac/BuildScript/build-installer.py 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Mac/BuildScript/build-installer.py 2023-06-06 22:30:33.000000000 +0000 @@ -246,9 +246,9 @@ result.extend([ dict( - name="OpenSSL 1.1.1t", - url="https://www.openssl.org/source/openssl-1.1.1t.tar.gz", - checksum='8dee9b24bdb1dcbf0c3d1e9b02fb8f6bf22165e807f45adeb7c9677536859d3b', + name="OpenSSL 1.1.1u", + url="https://www.openssl.org/source/openssl-1.1.1u.tar.gz", + checksum='e2f8d84b523eecd06c7be7626830370300fbcc15386bf5142d72758f6963ebc6', buildrecipe=build_universal_openssl, configure=None, install=None, diff -Nru python3.10-3.10.11/Mac/BuildScript/resources/Welcome.rtf python3.10-3.10.12/Mac/BuildScript/resources/Welcome.rtf --- python3.10-3.10.11/Mac/BuildScript/resources/Welcome.rtf 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Mac/BuildScript/resources/Welcome.rtf 2023-06-06 22:30:33.000000000 +0000 @@ -1,8 +1,8 @@ {\rtf1\ansi\ansicpg1252\cocoartf2708 \cocoascreenfonts1\cocoatextscaling0\cocoaplatform0{\fonttbl\f0\fswiss\fcharset0 Helvetica;\f1\fswiss\fcharset0 Helvetica-Bold;\f2\fmodern\fcharset0 CourierNewPSMT; -\f3\fnil\fcharset0 HelveticaNeue;\f4\fnil\fcharset0 HelveticaNeue-Bold;} -{\colortbl;\red255\green255\blue255;\red24\green26\blue30;\red255\green255\blue255;} -{\*\expandedcolortbl;;\cssrgb\c12157\c13725\c15686;\cssrgb\c100000\c100000\c100000;} +} +{\colortbl;\red255\green255\blue255;} +{\*\expandedcolortbl;;} \margl1440\margr1440\vieww12200\viewh10880\viewkind0 \pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\partightenfactor0 @@ -12,9 +12,8 @@ \f1\b macOS $MACOSX_DEPLOYMENT_TARGET \f0\b0 .\ \ -\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\partightenfactor0 -\f1\b \cf0 Python for macOS +\f1\b Python for macOS \f0\b0 consists of the {\field{\*\fldinst{HYPERLINK "https://www.python.org"}}{\fldrslt Python}} programming language interpreter and its batteries-included standard library to allow easy access to macOS features. It also includes the Python integrated development environment, \f1\b IDLE \f0\b0 . You can also use the included @@ -27,31 +26,24 @@ \ \f1\b macOS 13 Ventura users -\f0\b0 : due to an issue with macOS +\f0\b0 : Due to an issue with the macOS +\f1\b Installer +\f0\b0 app, installation of some third-party packages including this Python package may fail with a vague +\f1\b "The installer encountered an error" +\f0\b0 message if the +\f1\b Installer +\f0\b0 app does not have permission to access the folder containing the downloaded installer file, typically in the +\f1\b Downloads +\f0\b0 folder. 
Go to +\f1\b System Settings +\f0\b0 -> +\f1\b Privacy & Security +\f0\b0 -> +\f1\b Files and Folders +\f0\b0 , then click the mark in front of \f1\b Installer -\f0\b0 app, installation of some -\f3 \cf2 \cb3 \expnd0\expndtw0\kerning0 -third-party -\f0 \cf0 \cb1 \kerning1\expnd0\expndtw0 packages including this Python package may fail with a vague -\f4\b \cf2 \cb3 \expnd0\expndtw0\kerning0 -\'93The installer encountered an error\'94 -\f3\b0 \cf2 message if the -\f4\b \cf2 Installer -\f3\b0 \cf2 app does not have permission to access the folder containing the downloaded installer file, typically in the -\f4\b \cf2 Downloads -\f3\b0 \cf2 folder. Go to -\f4\b \cf2 System Settings -\f3\b0 \cf2 -> -\f4\b \cf2 Privacy & Security -\f3\b0 \cf2 -> -\f4\b \cf2 Files and Folders -\f3\b0 \cf2 , then click the mark in front of -\f4\b \cf2 Installer -\f3\b0 \cf2 to expand, and enable -\f4\b \cf2 Downloads Folder -\f0\b0 \cf0 \cb1 \kerning1\expnd0\expndtw0 by moving the toggle to the right -\f3 \cf2 \cb3 \expnd0\expndtw0\kerning0 -. See {\field{\*\fldinst{HYPERLINK "https://github.com/python/cpython/issues/103207"}}{\fldrslt https://github.com/python/cpython/issues/103207}} for up-to-date information on this issue. -\f0 \cf0 \cb1 \kerning1\expnd0\expndtw0 \ +\f0\b0 to expand, and enable +\f1\b Downloads Folder +\f0\b0 by moving the toggle to the right. See {\field{\*\fldinst{HYPERLINK "https://github.com/python/cpython/issues/103207"}}{\fldrslt https://github.com/python/cpython/issues/103207}} for up-to-date information on this issue.\ \ } \ No newline at end of file diff -Nru python3.10-3.10.11/Misc/NEWS python3.10-3.10.12/Misc/NEWS --- python3.10-3.10.11/Misc/NEWS 2023-04-04 22:10:32.000000000 +0000 +++ python3.10-3.10.12/Misc/NEWS 2023-06-06 22:43:10.000000000 +0000 @@ -2,6 +2,54 @@ Python News +++++++++++ +What's New in Python 3.10.12 final? +=================================== + +*Release date: 2023-06-06* + +Security +-------- + +- gh-issue-103142: The version of OpenSSL used in our binary builds has been + upgraded to 1.1.1u to address several CVEs. + +- gh-issue-99889: Fixed a security flaw in :func:`uu.decode` that could + allow for directory traversal based on the input if no ``out_file`` was + specified. + +- gh-issue-104049: Do not expose the local on-disk location in directory + indexes produced by :class:`http.server.SimpleHTTPRequestHandler`. + +- gh-issue-102153: :func:`urllib.parse.urlsplit` now strips leading C0 + control and space characters following the specification for URLs defined + by WHATWG in response to CVE-2023-24329. Patch by Illia Volochii. + +Library +------- + +- gh-issue-103935: Use :func:`io.open_code` for files to be executed instead + of raw :func:`open`. + +- gh-issue-102953: The extraction methods in :mod:`tarfile`, and + :func:`shutil.unpack_archive`, have a new *filter* argument that allows + limiting tar features that may be surprising or dangerous, such as + creating files outside the destination directory. See + :ref:`tarfile-extraction-filter` for details. + +Documentation +------------- + +- gh-issue-89412: Add missing documentation for the ``end_lineno`` and + ``end_offset`` attributes of the :class:`traceback.TracebackException` + class. + +Build +----- + +- gh-issue-103262: Fixes Windows installer build to work with latest + compilers. + + What's New in Python 3.10.11 final?
=================================== diff -Nru python3.10-3.10.11/Modules/_ssl_data_111.h python3.10-3.10.12/Modules/_ssl_data_111.h --- python3.10-3.10.11/Modules/_ssl_data_111.h 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Modules/_ssl_data_111.h 2023-06-06 22:30:33.000000000 +0000 @@ -1,4 +1,4 @@ -/* File generated by Tools/ssl/make_ssl_data.py *//* Generated on 2021-04-09T09:36:21.493286 */ +/* File generated by Tools/ssl/make_ssl_data.py *//* Generated on 2023-06-01T02:58:04.081473 */ static struct py_ssl_library_code library_codes[] = { #ifdef ERR_LIB_ASN1 {"ASN1", ERR_LIB_ASN1}, @@ -1375,6 +1375,11 @@ #else {"UNSUPPORTED_COMPRESSION_ALGORITHM", 46, 151}, #endif + #ifdef CMS_R_UNSUPPORTED_CONTENT_ENCRYPTION_ALGORITHM + {"UNSUPPORTED_CONTENT_ENCRYPTION_ALGORITHM", ERR_LIB_CMS, CMS_R_UNSUPPORTED_CONTENT_ENCRYPTION_ALGORITHM}, + #else + {"UNSUPPORTED_CONTENT_ENCRYPTION_ALGORITHM", 46, 194}, + #endif #ifdef CMS_R_UNSUPPORTED_CONTENT_TYPE {"UNSUPPORTED_CONTENT_TYPE", ERR_LIB_CMS, CMS_R_UNSUPPORTED_CONTENT_TYPE}, #else @@ -4860,6 +4865,11 @@ #else {"MISSING_PARAMETERS", 20, 290}, #endif + #ifdef SSL_R_MISSING_PSK_KEX_MODES_EXTENSION + {"MISSING_PSK_KEX_MODES_EXTENSION", ERR_LIB_SSL, SSL_R_MISSING_PSK_KEX_MODES_EXTENSION}, + #else + {"MISSING_PSK_KEX_MODES_EXTENSION", 20, 310}, + #endif #ifdef SSL_R_MISSING_RSA_CERTIFICATE {"MISSING_RSA_CERTIFICATE", ERR_LIB_SSL, SSL_R_MISSING_RSA_CERTIFICATE}, #else @@ -5065,6 +5075,11 @@ #else {"NULL_SSL_METHOD_PASSED", 20, 196}, #endif + #ifdef SSL_R_OCSP_CALLBACK_FAILURE + {"OCSP_CALLBACK_FAILURE", ERR_LIB_SSL, SSL_R_OCSP_CALLBACK_FAILURE}, + #else + {"OCSP_CALLBACK_FAILURE", 20, 294}, + #endif #ifdef SSL_R_OLD_SESSION_CIPHER_NOT_RETURNED {"OLD_SESSION_CIPHER_NOT_RETURNED", ERR_LIB_SSL, SSL_R_OLD_SESSION_CIPHER_NOT_RETURNED}, #else diff -Nru python3.10-3.10.11/Modules/_ssl_data_300.h python3.10-3.10.12/Modules/_ssl_data_300.h --- python3.10-3.10.11/Modules/_ssl_data_300.h 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Modules/_ssl_data_300.h 2023-06-06 22:30:33.000000000 +0000 @@ -1,4 +1,4 @@ -/* File generated by Tools/ssl/make_ssl_data.py *//* Generated on 2021-04-09T09:44:43.288448 */ +/* File generated by Tools/ssl/make_ssl_data.py *//* Generated on 2023-06-01T03:03:52.163218 */ static struct py_ssl_library_code library_codes[] = { #ifdef ERR_LIB_ASN1 {"ASN1", ERR_LIB_ASN1}, @@ -1035,6 +1035,11 @@ #else {"NO_INVERSE", 3, 108}, #endif + #ifdef BN_R_NO_PRIME_CANDIDATE + {"NO_PRIME_CANDIDATE", ERR_LIB_BN, BN_R_NO_PRIME_CANDIDATE}, + #else + {"NO_PRIME_CANDIDATE", 3, 121}, + #endif #ifdef BN_R_NO_SOLUTION {"NO_SOLUTION", ERR_LIB_BN, BN_R_NO_SOLUTION}, #else @@ -1255,6 +1260,11 @@ #else {"INVALID_OPTION", 58, 174}, #endif + #ifdef CMP_R_MISSING_CERTID + {"MISSING_CERTID", ERR_LIB_CMP, CMP_R_MISSING_CERTID}, + #else + {"MISSING_CERTID", 58, 165}, + #endif #ifdef CMP_R_MISSING_KEY_INPUT_FOR_CREATING_PROTECTION {"MISSING_KEY_INPUT_FOR_CREATING_PROTECTION", ERR_LIB_CMP, CMP_R_MISSING_KEY_INPUT_FOR_CREATING_PROTECTION}, #else @@ -1280,21 +1290,41 @@ #else {"MISSING_PRIVATE_KEY", 58, 131}, #endif + #ifdef CMP_R_MISSING_PRIVATE_KEY_FOR_POPO + {"MISSING_PRIVATE_KEY_FOR_POPO", ERR_LIB_CMP, CMP_R_MISSING_PRIVATE_KEY_FOR_POPO}, + #else + {"MISSING_PRIVATE_KEY_FOR_POPO", 58, 190}, + #endif #ifdef CMP_R_MISSING_PROTECTION {"MISSING_PROTECTION", ERR_LIB_CMP, CMP_R_MISSING_PROTECTION}, #else {"MISSING_PROTECTION", 58, 143}, #endif + #ifdef CMP_R_MISSING_PUBLIC_KEY + {"MISSING_PUBLIC_KEY", ERR_LIB_CMP, CMP_R_MISSING_PUBLIC_KEY}, + #else + 
{"MISSING_PUBLIC_KEY", 58, 183}, + #endif #ifdef CMP_R_MISSING_REFERENCE_CERT {"MISSING_REFERENCE_CERT", ERR_LIB_CMP, CMP_R_MISSING_REFERENCE_CERT}, #else {"MISSING_REFERENCE_CERT", 58, 168}, #endif + #ifdef CMP_R_MISSING_SECRET + {"MISSING_SECRET", ERR_LIB_CMP, CMP_R_MISSING_SECRET}, + #else + {"MISSING_SECRET", 58, 178}, + #endif #ifdef CMP_R_MISSING_SENDER_IDENTIFICATION {"MISSING_SENDER_IDENTIFICATION", ERR_LIB_CMP, CMP_R_MISSING_SENDER_IDENTIFICATION}, #else {"MISSING_SENDER_IDENTIFICATION", 58, 111}, #endif + #ifdef CMP_R_MISSING_TRUST_ANCHOR + {"MISSING_TRUST_ANCHOR", ERR_LIB_CMP, CMP_R_MISSING_TRUST_ANCHOR}, + #else + {"MISSING_TRUST_ANCHOR", 58, 179}, + #endif #ifdef CMP_R_MISSING_TRUST_STORE {"MISSING_TRUST_STORE", ERR_LIB_CMP, CMP_R_MISSING_TRUST_STORE}, #else @@ -1455,6 +1485,11 @@ #else {"WRONG_ALGORITHM_OID", 58, 138}, #endif + #ifdef CMP_R_WRONG_CERTID + {"WRONG_CERTID", ERR_LIB_CMP, CMP_R_WRONG_CERTID}, + #else + {"WRONG_CERTID", 58, 189}, + #endif #ifdef CMP_R_WRONG_CERTID_IN_RP {"WRONG_CERTID_IN_RP", ERR_LIB_CMP, CMP_R_WRONG_CERTID_IN_RP}, #else @@ -1885,6 +1920,11 @@ #else {"UNSUPPORTED_COMPRESSION_ALGORITHM", 46, 151}, #endif + #ifdef CMS_R_UNSUPPORTED_CONTENT_ENCRYPTION_ALGORITHM + {"UNSUPPORTED_CONTENT_ENCRYPTION_ALGORITHM", ERR_LIB_CMS, CMS_R_UNSUPPORTED_CONTENT_ENCRYPTION_ALGORITHM}, + #else + {"UNSUPPORTED_CONTENT_ENCRYPTION_ALGORITHM", 46, 194}, + #endif #ifdef CMS_R_UNSUPPORTED_CONTENT_TYPE {"UNSUPPORTED_CONTENT_TYPE", ERR_LIB_CMS, CMS_R_UNSUPPORTED_CONTENT_TYPE}, #else @@ -2045,6 +2085,11 @@ #else {"RECURSIVE_DIRECTORY_INCLUDE", 14, 111}, #endif + #ifdef CONF_R_RELATIVE_PATH + {"RELATIVE_PATH", ERR_LIB_CONF, CONF_R_RELATIVE_PATH}, + #else + {"RELATIVE_PATH", 14, 125}, + #endif #ifdef CONF_R_SSL_COMMAND_SECTION_EMPTY {"SSL_COMMAND_SECTION_EMPTY", ERR_LIB_CONF, CONF_R_SSL_COMMAND_SECTION_EMPTY}, #else @@ -2235,6 +2280,11 @@ #else {"INSUFFICIENT_SECURE_DATA_SPACE", 15, 108}, #endif + #ifdef CRYPTO_R_INVALID_NEGATIVE_VALUE + {"INVALID_NEGATIVE_VALUE", ERR_LIB_CRYPTO, CRYPTO_R_INVALID_NEGATIVE_VALUE}, + #else + {"INVALID_NEGATIVE_VALUE", 15, 122}, + #endif #ifdef CRYPTO_R_INVALID_NULL_ARGUMENT {"INVALID_NULL_ARGUMENT", ERR_LIB_CRYPTO, CRYPTO_R_INVALID_NULL_ARGUMENT}, #else @@ -2605,6 +2655,11 @@ #else {"SEED_LEN_SMALL", 10, 110}, #endif + #ifdef DSA_R_TOO_MANY_RETRIES + {"TOO_MANY_RETRIES", ERR_LIB_DSA, DSA_R_TOO_MANY_RETRIES}, + #else + {"TOO_MANY_RETRIES", 10, 116}, + #endif #ifdef DSO_R_CTRL_FAILED {"CTRL_FAILED", ERR_LIB_DSO, DSO_R_CTRL_FAILED}, #else @@ -2745,6 +2800,11 @@ #else {"EC_GROUP_NEW_BY_NAME_FAILURE", 16, 119}, #endif + #ifdef EC_R_EXPLICIT_PARAMS_NOT_SUPPORTED + {"EXPLICIT_PARAMS_NOT_SUPPORTED", ERR_LIB_EC, EC_R_EXPLICIT_PARAMS_NOT_SUPPORTED}, + #else + {"EXPLICIT_PARAMS_NOT_SUPPORTED", 16, 127}, + #endif #ifdef EC_R_FAILED_MAKING_PUBLIC_KEY {"FAILED_MAKING_PUBLIC_KEY", ERR_LIB_EC, EC_R_FAILED_MAKING_PUBLIC_KEY}, #else @@ -2850,6 +2910,11 @@ #else {"INVALID_KEY", 16, 116}, #endif + #ifdef EC_R_INVALID_LENGTH + {"INVALID_LENGTH", ERR_LIB_EC, EC_R_INVALID_LENGTH}, + #else + {"INVALID_LENGTH", 16, 117}, + #endif #ifdef EC_R_INVALID_NAMED_GROUP_CONVERSION {"INVALID_NAMED_GROUP_CONVERSION", ERR_LIB_EC, EC_R_INVALID_NAMED_GROUP_CONVERSION}, #else @@ -3010,6 +3075,11 @@ #else {"SLOT_FULL", 16, 108}, #endif + #ifdef EC_R_TOO_MANY_RETRIES + {"TOO_MANY_RETRIES", ERR_LIB_EC, EC_R_TOO_MANY_RETRIES}, + #else + {"TOO_MANY_RETRIES", 16, 176}, + #endif #ifdef EC_R_UNDEFINED_GENERATOR {"UNDEFINED_GENERATOR", ERR_LIB_EC, EC_R_UNDEFINED_GENERATOR}, #else @@ -3690,6 
+3760,11 @@ #else {"PUBLIC_KEY_NOT_RSA", 6, 106}, #endif + #ifdef EVP_R_SETTING_XOF_FAILED + {"SETTING_XOF_FAILED", ERR_LIB_EVP, EVP_R_SETTING_XOF_FAILED}, + #else + {"SETTING_XOF_FAILED", 6, 227}, + #endif #ifdef EVP_R_SET_DEFAULT_PROPERTY_FAILURE {"SET_DEFAULT_PROPERTY_FAILURE", ERR_LIB_EVP, EVP_R_SET_DEFAULT_PROPERTY_FAILURE}, #else @@ -3865,6 +3940,11 @@ #else {"FAILED_READING_DATA", 61, 128}, #endif + #ifdef HTTP_R_HEADER_PARSE_ERROR + {"HEADER_PARSE_ERROR", ERR_LIB_HTTP, HTTP_R_HEADER_PARSE_ERROR}, + #else + {"HEADER_PARSE_ERROR", 61, 126}, + #endif #ifdef HTTP_R_INCONSISTENT_CONTENT_LENGTH {"INCONSISTENT_CONTENT_LENGTH", ERR_LIB_HTTP, HTTP_R_INCONSISTENT_CONTENT_LENGTH}, #else @@ -3935,6 +4015,16 @@ #else {"RESPONSE_PARSE_ERROR", 61, 104}, #endif + #ifdef HTTP_R_RETRY_TIMEOUT + {"RETRY_TIMEOUT", ERR_LIB_HTTP, HTTP_R_RETRY_TIMEOUT}, + #else + {"RETRY_TIMEOUT", 61, 129}, + #endif + #ifdef HTTP_R_SERVER_CANCELED_CONNECTION + {"SERVER_CANCELED_CONNECTION", ERR_LIB_HTTP, HTTP_R_SERVER_CANCELED_CONNECTION}, + #else + {"SERVER_CANCELED_CONNECTION", 61, 127}, + #endif #ifdef HTTP_R_SOCK_NOT_SUPPORTED {"SOCK_NOT_SUPPORTED", ERR_LIB_HTTP, HTTP_R_SOCK_NOT_SUPPORTED}, #else @@ -4100,6 +4190,16 @@ #else {"UNSUPPORTED_REQUESTORNAME_TYPE", 39, 129}, #endif + #ifdef OSSL_DECODER_R_COULD_NOT_DECODE_OBJECT + {"COULD_NOT_DECODE_OBJECT", ERR_LIB_OSSL_DECODER, OSSL_DECODER_R_COULD_NOT_DECODE_OBJECT}, + #else + {"COULD_NOT_DECODE_OBJECT", 60, 101}, + #endif + #ifdef OSSL_DECODER_R_DECODER_NOT_FOUND + {"DECODER_NOT_FOUND", ERR_LIB_OSSL_DECODER, OSSL_DECODER_R_DECODER_NOT_FOUND}, + #else + {"DECODER_NOT_FOUND", 60, 102}, + #endif #ifdef OSSL_DECODER_R_MISSING_GET_PARAMS {"MISSING_GET_PARAMS", ERR_LIB_OSSL_DECODER, OSSL_DECODER_R_MISSING_GET_PARAMS}, #else @@ -4190,6 +4290,11 @@ #else {"NOT_PARAMETERS", 44, 104}, #endif + #ifdef OSSL_STORE_R_NO_LOADERS_FOUND + {"NO_LOADERS_FOUND", ERR_LIB_OSSL_STORE, OSSL_STORE_R_NO_LOADERS_FOUND}, + #else + {"NO_LOADERS_FOUND", 44, 123}, + #endif #ifdef OSSL_STORE_R_PASSPHRASE_CALLBACK_ERROR {"PASSPHRASE_CALLBACK_ERROR", ERR_LIB_OSSL_STORE, OSSL_STORE_R_PASSPHRASE_CALLBACK_ERROR}, #else @@ -4935,6 +5040,11 @@ #else {"INVALID_DIGEST_SIZE", 57, 218}, #endif + #ifdef PROV_R_INVALID_INPUT_LENGTH + {"INVALID_INPUT_LENGTH", ERR_LIB_PROV, PROV_R_INVALID_INPUT_LENGTH}, + #else + {"INVALID_INPUT_LENGTH", 57, 230}, + #endif #ifdef PROV_R_INVALID_ITERATION_COUNT {"INVALID_ITERATION_COUNT", ERR_LIB_PROV, PROV_R_INVALID_ITERATION_COUNT}, #else @@ -4970,6 +5080,11 @@ #else {"INVALID_MODE", 57, 125}, #endif + #ifdef PROV_R_INVALID_OUTPUT_LENGTH + {"INVALID_OUTPUT_LENGTH", ERR_LIB_PROV, PROV_R_INVALID_OUTPUT_LENGTH}, + #else + {"INVALID_OUTPUT_LENGTH", 57, 217}, + #endif #ifdef PROV_R_INVALID_PADDING_MODE {"INVALID_PADDING_MODE", ERR_LIB_PROV, PROV_R_INVALID_PADDING_MODE}, #else @@ -5035,6 +5150,16 @@ #else {"KEY_SIZE_TOO_SMALL", 57, 171}, #endif + #ifdef PROV_R_LENGTH_TOO_LARGE + {"LENGTH_TOO_LARGE", ERR_LIB_PROV, PROV_R_LENGTH_TOO_LARGE}, + #else + {"LENGTH_TOO_LARGE", 57, 202}, + #endif + #ifdef PROV_R_MISMATCHING_DOMAIN_PARAMETERS + {"MISMATCHING_DOMAIN_PARAMETERS", ERR_LIB_PROV, PROV_R_MISMATCHING_DOMAIN_PARAMETERS}, + #else + {"MISMATCHING_DOMAIN_PARAMETERS", 57, 203}, + #endif #ifdef PROV_R_MISSING_CEK_ALG {"MISSING_CEK_ALG", ERR_LIB_PROV, PROV_R_MISSING_CEK_ALG}, #else @@ -5695,6 +5820,11 @@ #else {"INVALID_LABEL", 4, 160}, #endif + #ifdef RSA_R_INVALID_LENGTH + {"INVALID_LENGTH", ERR_LIB_RSA, RSA_R_INVALID_LENGTH}, + #else + {"INVALID_LENGTH", 4, 181}, + #endif #ifdef 
RSA_R_INVALID_MESSAGE_LENGTH {"INVALID_MESSAGE_LENGTH", ERR_LIB_RSA, RSA_R_INVALID_MESSAGE_LENGTH}, #else @@ -5880,6 +6010,11 @@ #else {"Q_NOT_PRIME", 4, 129}, #endif + #ifdef RSA_R_RANDOMNESS_SOURCE_STRENGTH_INSUFFICIENT + {"RANDOMNESS_SOURCE_STRENGTH_INSUFFICIENT", ERR_LIB_RSA, RSA_R_RANDOMNESS_SOURCE_STRENGTH_INSUFFICIENT}, + #else + {"RANDOMNESS_SOURCE_STRENGTH_INSUFFICIENT", 4, 180}, + #endif #ifdef RSA_R_RSA_OPERATIONS_NOT_SUPPORTED {"RSA_OPERATIONS_NOT_SUPPORTED", ERR_LIB_RSA, RSA_R_RSA_OPERATIONS_NOT_SUPPORTED}, #else @@ -6680,6 +6815,11 @@ #else {"INVALID_TICKET_KEYS_LENGTH", 20, 325}, #endif + #ifdef SSL_R_LEGACY_SIGALG_DISALLOWED_OR_UNSUPPORTED + {"LEGACY_SIGALG_DISALLOWED_OR_UNSUPPORTED", ERR_LIB_SSL, SSL_R_LEGACY_SIGALG_DISALLOWED_OR_UNSUPPORTED}, + #else + {"LEGACY_SIGALG_DISALLOWED_OR_UNSUPPORTED", 20, 333}, + #endif #ifdef SSL_R_LENGTH_MISMATCH {"LENGTH_MISMATCH", ERR_LIB_SSL, SSL_R_LENGTH_MISMATCH}, #else @@ -6725,6 +6865,11 @@ #else {"MISSING_PARAMETERS", 20, 290}, #endif + #ifdef SSL_R_MISSING_PSK_KEX_MODES_EXTENSION + {"MISSING_PSK_KEX_MODES_EXTENSION", ERR_LIB_SSL, SSL_R_MISSING_PSK_KEX_MODES_EXTENSION}, + #else + {"MISSING_PSK_KEX_MODES_EXTENSION", 20, 310}, + #endif #ifdef SSL_R_MISSING_RSA_CERTIFICATE {"MISSING_RSA_CERTIFICATE", ERR_LIB_SSL, SSL_R_MISSING_RSA_CERTIFICATE}, #else @@ -6940,6 +7085,11 @@ #else {"NULL_SSL_METHOD_PASSED", 20, 196}, #endif + #ifdef SSL_R_OCSP_CALLBACK_FAILURE + {"OCSP_CALLBACK_FAILURE", ERR_LIB_SSL, SSL_R_OCSP_CALLBACK_FAILURE}, + #else + {"OCSP_CALLBACK_FAILURE", 20, 305}, + #endif #ifdef SSL_R_OLD_SESSION_CIPHER_NOT_RETURNED {"OLD_SESSION_CIPHER_NOT_RETURNED", ERR_LIB_SSL, SSL_R_OLD_SESSION_CIPHER_NOT_RETURNED}, #else diff -Nru python3.10-3.10.11/PCbuild/get_externals.bat python3.10-3.10.12/PCbuild/get_externals.bat --- python3.10-3.10.11/PCbuild/get_externals.bat 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/PCbuild/get_externals.bat 2023-06-06 22:30:33.000000000 +0000 @@ -53,7 +53,7 @@ set libraries= set libraries=%libraries% bzip2-1.0.8 if NOT "%IncludeLibffiSrc%"=="false" set libraries=%libraries% libffi-3.3.0 -if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-1.1.1t +if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-1.1.1u set libraries=%libraries% sqlite-3.40.1.0 if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tcl-core-8.6.12.0 if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tk-8.6.12.0 @@ -77,7 +77,7 @@ set binaries= if NOT "%IncludeLibffi%"=="false" set binaries=%binaries% libffi-3.3.0 -if NOT "%IncludeSSL%"=="false" set binaries=%binaries% openssl-bin-1.1.1t +if NOT "%IncludeSSL%"=="false" set binaries=%binaries% openssl-bin-1.1.1u if NOT "%IncludeTkinter%"=="false" set binaries=%binaries% tcltk-8.6.12.0 if NOT "%IncludeSSLSrc%"=="false" set binaries=%binaries% nasm-2.11.06 diff -Nru python3.10-3.10.11/PCbuild/python.props python3.10-3.10.12/PCbuild/python.props --- python3.10-3.10.11/PCbuild/python.props 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/PCbuild/python.props 2023-06-06 22:30:33.000000000 +0000 @@ -70,8 +70,8 @@ $(ExternalsDir)libffi-3.3.0\ $(libffiDir)$(ArchName)\ $(libffiOutDir)include - $(ExternalsDir)openssl-1.1.1t\ - $(ExternalsDir)openssl-bin-1.1.1t\$(ArchName)\ + $(ExternalsDir)openssl-1.1.1u\ + $(ExternalsDir)openssl-bin-1.1.1u\$(ArchName)\ $(opensslOutDir)include $(ExternalsDir)\nasm-2.11.06\ $(ExternalsDir)\zlib-1.2.13\ diff -Nru python3.10-3.10.11/PCbuild/readme.txt python3.10-3.10.12/PCbuild/readme.txt --- 
python3.10-3.10.11/PCbuild/readme.txt 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/PCbuild/readme.txt 2023-06-06 22:30:33.000000000 +0000 @@ -167,7 +167,7 @@ Homepage: https://tukaani.org/xz/ _ssl - Python wrapper for version 1.1.1t of the OpenSSL secure sockets + Python wrapper for version 1.1.1u of the OpenSSL secure sockets library, which is downloaded from our binaries repository at https://github.com/python/cpython-bin-deps. diff -Nru python3.10-3.10.11/Python/marshal.c python3.10-3.10.12/Python/marshal.c --- python3.10-3.10.11/Python/marshal.c 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Python/marshal.c 2023-06-06 22:30:33.000000000 +0000 @@ -576,6 +576,10 @@ } /* version currently has no effect for writing ints. */ +/* Note that while the documentation states that this function + * can error, currently it never does. Setting an exception in + * this function should be regarded as an API-breaking change. + */ void PyMarshal_WriteLongToFile(long x, FILE *fp, int version) { diff -Nru python3.10-3.10.11/README.rst python3.10-3.10.12/README.rst --- python3.10-3.10.11/README.rst 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/README.rst 2023-06-06 22:30:33.000000000 +0000 @@ -1,4 +1,4 @@ -This is Python version 3.10.11 +This is Python version 3.10.12 ============================== .. image:: https://travis-ci.com/python/cpython.svg?branch=master diff -Nru python3.10-3.10.11/Tools/msi/build.bat python3.10-3.10.12/Tools/msi/build.bat --- python3.10-3.10.11/Tools/msi/build.bat 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Tools/msi/build.bat 2023-06-06 22:30:33.000000000 +0000 @@ -27,23 +27,18 @@ call "%PCBUILD%find_msbuild.bat" %MSBUILD% if ERRORLEVEL 1 (echo Cannot locate MSBuild.exe on PATH or as MSBUILD variable & exit /b 2) -if defined BUILDX86 ( - call "%PCBUILD%build.bat" -p Win32 -d -e %REBUILD% %BUILDTEST% - if errorlevel 1 exit /B %ERRORLEVEL% - call "%PCBUILD%build.bat" -p Win32 -e %REBUILD% %BUILDTEST% - if errorlevel 1 exit /B %ERRORLEVEL% -) -if defined BUILDX64 ( - call "%PCBUILD%build.bat" -p x64 -d -e %REBUILD% %BUILDTEST% - if errorlevel 1 exit /B %ERRORLEVEL% - call "%PCBUILD%build.bat" -p x64 -e %REBUILD% %BUILDTEST% - if errorlevel 1 exit /B %ERRORLEVEL% -) +if defined BUILDX86 call "%PCBUILD%build.bat" -p Win32 -d -e %REBUILD% %BUILDTEST% +if errorlevel 1 exit /B %ERRORLEVEL% +if defined BUILDX86 call "%PCBUILD%build.bat" -p Win32 -e %REBUILD% %BUILDTEST% +if errorlevel 1 exit /B %ERRORLEVEL% -if defined BUILDDOC ( - call "%PCBUILD%..\Doc\make.bat" htmlhelp - if errorlevel 1 exit /B %ERRORLEVEL% -) +if defined BUILDX64 call "%PCBUILD%build.bat" -p x64 -d -e %REBUILD% %BUILDTEST% +if errorlevel 1 exit /B %ERRORLEVEL% +if defined BUILDX64 call "%PCBUILD%build.bat" -p x64 -e %REBUILD% %BUILDTEST% +if errorlevel 1 exit /B %ERRORLEVEL% + +if defined BUILDDOC call "%PCBUILD%..\Doc\make.bat" html +if errorlevel 1 exit /B %ERRORLEVEL% rem Build the launcher MSI separately %MSBUILD% "%D%launcher\launcher.wixproj" /p:Platform=x86 @@ -60,14 +55,11 @@ set BUILD_CMD=%BUILD_CMD% /t:Rebuild ) -if defined BUILDX86 ( - %MSBUILD% /p:Platform=x86 %BUILD_CMD% - if errorlevel 1 exit /B %ERRORLEVEL% -) -if defined BUILDX64 ( - %MSBUILD% /p:Platform=x64 %BUILD_CMD% - if errorlevel 1 exit /B %ERRORLEVEL% -) +if defined BUILDX86 %MSBUILD% /p:Platform=x86 %BUILD_CMD% +if errorlevel 1 exit /B %ERRORLEVEL% + +if defined BUILDX64 %MSBUILD% /p:Platform=x64 %BUILD_CMD% +if errorlevel 1 exit /B %ERRORLEVEL% exit /B 0 diff -Nru 
python3.10-3.10.11/Tools/msi/bundle/bootstrap/pythonba.vcxproj python3.10-3.10.12/Tools/msi/bundle/bootstrap/pythonba.vcxproj --- python3.10-3.10.11/Tools/msi/bundle/bootstrap/pythonba.vcxproj 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Tools/msi/bundle/bootstrap/pythonba.vcxproj 2023-06-06 22:30:33.000000000 +0000 @@ -21,6 +21,7 @@ Release Win32 + v143 v142 v141 @@ -49,8 +50,7 @@ comctl32.lib;gdiplus.lib;msimg32.lib;shlwapi.lib;wininet.lib;dutil.lib;balutil.lib;version.lib;uxtheme.lib;%(AdditionalDependencies) - $(WixInstallPath)sdk\vs2017\lib\x86 - $(WixInstallPath)sdk\vs2017\lib\x86 + $(WixInstallPath)sdk\vs2017\lib\x86 $(WixInstallPath)sdk\vs2015\lib\x86 $(WixInstallPath)sdk\vs2013\lib\x86 pythonba.def diff -Nru python3.10-3.10.11/Tools/ssl/multissltests.py python3.10-3.10.12/Tools/ssl/multissltests.py --- python3.10-3.10.11/Tools/ssl/multissltests.py 2023-04-04 21:57:15.000000000 +0000 +++ python3.10-3.10.12/Tools/ssl/multissltests.py 2023-06-06 22:30:33.000000000 +0000 @@ -47,8 +47,9 @@ ] OPENSSL_RECENT_VERSIONS = [ - "1.1.1t", - "3.0.8" + "1.1.1u", + "3.0.9", + "3.1.1", ] LIBRESSL_OLD_VERSIONS = [