Script 'mail_helper' called by obssrc
Hello community,
here is the log from the commit of package python-backports.zstd for
openSUSE:Factory checked in at 2026-03-29 20:00:55
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-backports.zstd (Old)
and /work/SRC/openSUSE:Factory/.python-backports.zstd.new.8177 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-backports.zstd"
Sun Mar 29 20:00:55 2026 rev:2 rq:1343408 version:1.3.0
Changes:
--------
---
/work/SRC/openSUSE:Factory/python-backports.zstd/python-backports.zstd.changes
2026-01-07 16:01:24.711110108 +0100
+++
/work/SRC/openSUSE:Factory/.python-backports.zstd.new.8177/python-backports.zstd.changes
2026-03-29 20:01:16.266862867 +0200
@@ -1,0 +2,7 @@
+Sun Mar 29 10:23:14 UTC 2026 - Dirk Müller <[email protected]>
+
+- update to 1.3.0:
+ * Python 3.13/3.14 compat. fixes
+- drop do-not-abort-with-python313.patch (upstream)
+
+-------------------------------------------------------------------
Old:
----
backports_zstd-1.0.0.tar.gz
do-not-abort-with-python313.patch
New:
----
backports_zstd-1.3.0.tar.gz
----------(Old B)----------
Old: * Python 3.13/3.14 compat. fixes
- drop do-not-abort-with-python313.patch (upstream)
----------(Old E)----------
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-backports.zstd.spec ++++++
--- /var/tmp/diff_new_pack.Mfdfjw/_old 2026-03-29 20:01:16.890888577 +0200
+++ /var/tmp/diff_new_pack.Mfdfjw/_new 2026-03-29 20:01:16.890888577 +0200
@@ -1,7 +1,7 @@
#
# spec file for package python-backports.zstd
#
-# Copyright (c) 2025 SUSE LLC and contributors
+# Copyright (c) 2026 SUSE LLC and contributors
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -19,14 +19,12 @@
# Only supported with Python <= 3.13
%define skip_python314 1
Name: python-backports.zstd
-Version: 1.0.0
+Version: 1.3.0
Release: 0
Summary: Backport of compression.zstd
License: BSD-3-Clause
URL: https://github.com/rogdham/backports.zstd
Source:
https://files.pythonhosted.org/packages/source/b/backports.zstd/backports_zstd-%{version}.tar.gz
-# PATCH-FIX-UPSTREAM Based on gh#Rogdham/backports.zstd#54
-Patch0: do-not-abort-with-python313.patch
BuildRequires: %{python_module devel}
BuildRequires: %{python_module pip}
BuildRequires: %{python_module pytest}
++++++ backports_zstd-1.0.0.tar.gz -> backports_zstd-1.3.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/backports_zstd-1.0.0/PKG-INFO
new/backports_zstd-1.3.0/PKG-INFO
--- old/backports_zstd-1.0.0/PKG-INFO 2025-10-10 07:34:58.011217000 +0200
+++ new/backports_zstd-1.3.0/PKG-INFO 2025-12-29 16:59:53.430338400 +0100
@@ -1,6 +1,6 @@
Metadata-Version: 2.4
Name: backports.zstd
-Version: 1.0.0
+Version: 1.3.0
Summary: Backport of compression.zstd
Author-email: Rogdham <[email protected]>
License-Expression: PSF-2.0
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/backports_zstd-1.0.0/pyproject.toml
new/backports_zstd-1.3.0/pyproject.toml
--- old/backports_zstd-1.0.0/pyproject.toml 2025-10-10 07:34:48.000000000
+0200
+++ new/backports_zstd-1.3.0/pyproject.toml 2025-12-29 16:59:44.000000000
+0100
@@ -1,6 +1,6 @@
[project]
name = "backports.zstd"
-version = "1.0.0"
+version = "1.3.0"
authors = [{ name = "Rogdham", email = "[email protected]" }]
description = "Backport of compression.zstd"
readme = { file = "README.md", content-type = "text/markdown" }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/backports_zstd-1.0.0/setup.py
new/backports_zstd-1.3.0/setup.py
--- old/backports_zstd-1.0.0/setup.py 2025-10-10 07:34:48.000000000 +0200
+++ new/backports_zstd-1.3.0/setup.py 2025-12-29 16:59:44.000000000 +0100
@@ -6,6 +6,10 @@
from setuptools import Extension, setup
+if not ((3, 9) <= sys.version_info < (3, 14)):
+ raise RuntimeError(f"Unsupported Python version: {sys.version}")
+
+
# create a LICENSE_zstd.txt file
# wheels distributions needs to ship the license of the zstd library
ROOT_PATH = Path(__file__).parent.absolute()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore'
old/backports_zstd-1.0.0/src/c/compat/backports_zstd_edits.h
new/backports_zstd-1.3.0/src/c/compat/backports_zstd_edits.h
--- old/backports_zstd-1.0.0/src/c/compat/backports_zstd_edits.h
2025-10-10 07:34:48.000000000 +0200
+++ new/backports_zstd-1.3.0/src/c/compat/backports_zstd_edits.h
2025-12-29 16:59:44.000000000 +0100
@@ -107,12 +107,14 @@
#define BACKPORTSZSTD_LOCK_lock PyMutex_Lock
#define BACKPORTSZSTD_LOCK_unlock PyMutex_Unlock
#define BACKPORTSZSTD_LOCK_free(l)
+#if PY_VERSION_HEX < 0x030E0000 // Python 3.13 and below
static inline int BACKPORTSZSTD_LOCK_isLocked(PyMutex *lp)
{
- // note: this function is only used in asserts
- // PyMutex_IsLocked is not exposed publicly
https://github.com/python/cpython/issues/134009
- Py_FatalError("Not implemented");
+ return (_Py_atomic_load_uint8(&lp->_bits) & _Py_LOCKED) != 0;
}
+#else // Python 3.14 and above
+#define BACKPORTSZSTD_LOCK_isLocked PyMutex_IsLocked
+#endif
#endif /* !BACKPORTSZSTD_LOCK */
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore'
old/backports_zstd-1.0.0/src/c/compression_zstd/compressor.c
new/backports_zstd-1.3.0/src/c/compression_zstd/compressor.c
--- old/backports_zstd-1.0.0/src/c/compression_zstd/compressor.c
2025-10-10 07:34:48.000000000 +0200
+++ new/backports_zstd-1.3.0/src/c/compression_zstd/compressor.c
2025-12-29 16:59:44.000000000 +0100
@@ -524,11 +524,13 @@
return NULL;
}
+#ifndef NDEBUG
static inline int
mt_continue_should_break(ZSTD_inBuffer *in, ZSTD_outBuffer *out)
{
return in->size == in->pos && out->size != out->pos;
}
+#endif
static PyObject *
compress_mt_continue_lock_held(ZstdCompressor *self, Py_buffer *data)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore'
old/backports_zstd-1.0.0/src/python/backports/__init__.py
new/backports_zstd-1.3.0/src/python/backports/__init__.py
--- old/backports_zstd-1.0.0/src/python/backports/__init__.py 2025-10-10
07:34:48.000000000 +0200
+++ new/backports_zstd-1.3.0/src/python/backports/__init__.py 1970-01-01
01:00:00.000000000 +0100
@@ -1,8 +0,0 @@
-# see https://github.com/brandon-rhodes/backports
-
-# A Python "namespace package" http://www.python.org/dev/peps/pep-0382/
-# This always goes inside of a namespace package's __init__.py
-
-from pkgutil import extend_path
-
-__path__ = extend_path(__path__, __name__)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore'
old/backports_zstd-1.0.0/src/python/backports/zstd/__init__.py
new/backports_zstd-1.3.0/src/python/backports/zstd/__init__.py
--- old/backports_zstd-1.0.0/src/python/backports/zstd/__init__.py
2025-10-10 07:34:48.000000000 +0200
+++ new/backports_zstd-1.3.0/src/python/backports/zstd/__init__.py
2025-12-29 16:59:44.000000000 +0100
@@ -1,5 +1,9 @@
"""Python bindings to the Zstandard (zstd) compression library (RFC-8878)."""
+import sys
+if not ((3, 9) <= sys.version_info < (3, 14)):
+ raise RuntimeError(f"Unsupported Python version: {sys.version}")
+
__all__ = (
# backports.zstd
'COMPRESSION_LEVEL_DEFAULT',
@@ -31,7 +35,6 @@
import backports.zstd._zstd as _zstd
import enum
-from backports.zstd._shutil import register_shutil
from backports.zstd._zstd import (ZstdCompressor, ZstdDecompressor, ZstdDict,
ZstdError,
get_frame_size, zstd_version)
from backports.zstd._zstdfile import ZstdFile, open, _nbytes
@@ -247,3 +250,11 @@
# Check validity of the CompressionParameter & DecompressionParameter types
_zstd.set_parameter_types(CompressionParameter, DecompressionParameter)
+
+
+# Lazy loading
+def __getattr__(name):
+ if name == "register_shutil":
+ from backports.zstd._shutil import register_shutil
+ return register_shutil
+ raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore'
old/backports_zstd-1.0.0/src/python/backports/zstd/__init__.pyi
new/backports_zstd-1.3.0/src/python/backports/zstd/__init__.pyi
--- old/backports_zstd-1.0.0/src/python/backports/zstd/__init__.pyi
2025-10-10 07:34:48.000000000 +0200
+++ new/backports_zstd-1.3.0/src/python/backports/zstd/__init__.pyi
2025-12-29 16:59:44.000000000 +0100
@@ -68,9 +68,14 @@
def train_dict(samples: Iterable[ReadableBuffer], dict_size: int) -> ZstdDict:
...
def finalize_dict(zstd_dict: ZstdDict, /, samples: Iterable[ReadableBuffer],
dict_size: int, level: int) -> ZstdDict: ...
def compress(
- data: ReadableBuffer, level: int | None = None, options: Mapping[int, int]
| None = None, zstd_dict: ZstdDict | None = None
+ data: ReadableBuffer,
+ level: int | None = None,
+ options: Mapping[int, int] | None = None,
+ zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None,
+) -> bytes: ...
+def decompress(
+ data: ReadableBuffer, zstd_dict: ZstdDict | tuple[ZstdDict, int] | None =
None, options: Mapping[int, int] | None = None
) -> bytes: ...
-def decompress(data: ReadableBuffer, zstd_dict: ZstdDict | None = None,
options: Mapping[int, int] | None = None) -> bytes: ...
@final
class CompressionParameter(enum.IntEnum):
compression_level = ...
@@ -147,7 +152,7 @@
*,
level: None = None,
options: Mapping[int, int] | None = None,
- zstd_dict: ZstdDict | None = None,
+ zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None,
) -> None: ...
@overload
def __init__(
@@ -158,7 +163,7 @@
*,
level: int | None = None,
options: Mapping[int, int] | None = None,
- zstd_dict: ZstdDict | None = None,
+ zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None,
) -> None: ...
def write(self, data: ReadableBuffer, /) -> int: ...
def flush(self, mode: _ZstdCompressorFlushBlock |
_ZstdCompressorFlushFrame = 1) -> bytes: ... # type: ignore[override]
@@ -182,7 +187,7 @@
*,
level: None = None,
options: Mapping[int, int] | None = None,
- zstd_dict: ZstdDict | None = None,
+ zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None,
encoding: str | None = None,
errors: str | None = None,
newline: str | None = None,
@@ -195,7 +200,7 @@
*,
level: int | None = None,
options: Mapping[int, int] | None = None,
- zstd_dict: ZstdDict | None = None,
+ zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None,
encoding: str | None = None,
errors: str | None = None,
newline: str | None = None,
@@ -208,7 +213,7 @@
*,
level: None = None,
options: Mapping[int, int] | None = None,
- zstd_dict: ZstdDict | None = None,
+ zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None,
encoding: str | None = None,
errors: str | None = None,
newline: str | None = None,
@@ -221,7 +226,7 @@
*,
level: int | None = None,
options: Mapping[int, int] | None = None,
- zstd_dict: ZstdDict | None = None,
+ zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None,
encoding: str | None = None,
errors: str | None = None,
newline: str | None = None,
@@ -241,7 +246,10 @@
FLUSH_BLOCK: Final = 1
FLUSH_FRAME: Final = 2
def __new__(
- cls, level: int | None = None, options: Mapping[int, int] | None =
None, zstd_dict: ZstdDict | None = None
+ cls,
+ level: int | None = None,
+ options: Mapping[int, int] | None = None,
+ zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None,
) -> Self: ...
def compress(
self, /, data: ReadableBuffer, mode: _ZstdCompressorContinue |
_ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame = 0
@@ -253,7 +261,9 @@
@final
class ZstdDecompressor:
- def __new__(cls, zstd_dict: ZstdDict | None = None, options: Mapping[int,
int] | None = None) -> Self: ...
+ def __new__(
+ cls, zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None,
options: Mapping[int, int] | None = None
+ ) -> Self: ...
def decompress(self, /, data: ReadableBuffer, max_length: int = -1) ->
bytes: ...
@property
def eof(self) -> bool: ...
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore'
old/backports_zstd-1.0.0/src/python/backports/zstd/tarfile.py
new/backports_zstd-1.3.0/src/python/backports/zstd/tarfile.py
--- old/backports_zstd-1.0.0/src/python/backports/zstd/tarfile.py
2025-10-10 07:34:48.000000000 +0200
+++ new/backports_zstd-1.3.0/src/python/backports/zstd/tarfile.py
2025-12-29 16:59:44.000000000 +0100
@@ -364,7 +364,7 @@
fileobj = _StreamProxy(fileobj)
comptype = fileobj.getcomptype()
- self.name = name or ""
+ self.name = os.fspath(name) if name is not None else ""
self.mode = mode
self.comptype = comptype
self.fileobj = fileobj
@@ -2726,6 +2726,9 @@
return
else:
if os.path.exists(tarinfo._link_target):
+ if os.path.lexists(targetpath):
+ # Avoid FileExistsError on following os.link.
+ os.unlink(targetpath)
os.link(tarinfo._link_target, targetpath)
return
except symlink_exception:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore'
old/backports_zstd-1.0.0/src/python/backports/zstd/tarfile.pyi
new/backports_zstd-1.3.0/src/python/backports/zstd/tarfile.pyi
--- old/backports_zstd-1.0.0/src/python/backports/zstd/tarfile.pyi
2025-10-10 07:34:48.000000000 +0200
+++ new/backports_zstd-1.3.0/src/python/backports/zstd/tarfile.pyi
2025-12-29 16:59:44.000000000 +0100
@@ -189,7 +189,7 @@
errorlevel: int | None = ...,
level: None = None,
options: Mapping[int, int] | None = None,
- zstd_dict: ZstdDict | None = None,
+ zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None,
) -> Self: ...
@overload
@@ -329,7 +329,7 @@
debug: int | None = ...,
errorlevel: int | None = ...,
options: Mapping[int, int] | None = None,
- zstd_dict: ZstdDict | None = None,
+ zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None,
) -> Self: ...
@overload
@classmethod
@@ -350,7 +350,7 @@
debug: int | None = ...,
errorlevel: int | None = ...,
options: Mapping[int, int] | None = None,
- zstd_dict: ZstdDict | None = None,
+ zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None,
) -> Self: ...
@overload
@@ -584,7 +584,7 @@
fileobj: IO[bytes] | None = None,
level: None = None,
options: Mapping[int, int] | None = None,
- zstd_dict: ZstdDict | None = None,
+ zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None,
*,
format: int | None = ...,
tarinfo: type[TarInfo] | None = ...,
@@ -604,7 +604,7 @@
fileobj: IO[bytes] | None = None,
level: int | None = None,
options: Mapping[int, int] | None = None,
- zstd_dict: ZstdDict | None = None,
+ zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None,
*,
format: int | None = ...,
tarinfo: type[TarInfo] | None = ...,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore'
old/backports_zstd-1.0.0/src/python/backports/zstd/zipfile/__init__.py
new/backports_zstd-1.3.0/src/python/backports/zstd/zipfile/__init__.py
--- old/backports_zstd-1.0.0/src/python/backports/zstd/zipfile/__init__.py
2025-10-10 07:34:48.000000000 +0200
+++ new/backports_zstd-1.3.0/src/python/backports/zstd/zipfile/__init__.py
2025-12-29 16:59:44.000000000 +0100
@@ -262,7 +262,7 @@
else:
with open(filename, "rb") as fp:
result = _check_zipfile(fp)
- except OSError:
+ except (OSError, BadZipFile):
pass
return result
@@ -272,9 +272,6 @@
# "concat" is zero, unless zip was concatenated to another file
concat = endrec[_ECD_LOCATION] - size_cd - offset_cd
- if endrec[_ECD_SIGNATURE] == stringEndArchive64:
- # If Zip64 extension structures are present, account for them
- concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator)
if debug > 2:
inferred = concat + offset_cd
@@ -286,16 +283,15 @@
"""
Read the ZIP64 end-of-archive records and use that to update endrec
"""
- try:
- fpin.seek(offset - sizeEndCentDir64Locator, 2)
- except OSError:
- # If the seek fails, the file is not large enough to contain a ZIP64
+ offset -= sizeEndCentDir64Locator
+ if offset < 0:
+ # The file is not large enough to contain a ZIP64
# end-of-archive record, so just return the end record we were given.
return endrec
-
+ fpin.seek(offset)
data = fpin.read(sizeEndCentDir64Locator)
if len(data) != sizeEndCentDir64Locator:
- return endrec
+ raise OSError("Unknown I/O error")
sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data)
if sig != stringEndArchive64Locator:
return endrec
@@ -303,16 +299,33 @@
if diskno != 0 or disks > 1:
raise BadZipFile("zipfiles that span multiple disks are not supported")
- # Assume no 'zip64 extensible data'
- fpin.seek(offset - sizeEndCentDir64Locator - sizeEndCentDir64, 2)
+ offset -= sizeEndCentDir64
+ if reloff > offset:
+ raise BadZipFile("Corrupt zip64 end of central directory locator")
+ # First, check the assumption that there is no prepended data.
+ fpin.seek(reloff)
+ extrasz = offset - reloff
data = fpin.read(sizeEndCentDir64)
if len(data) != sizeEndCentDir64:
- return endrec
+ raise OSError("Unknown I/O error")
+ if not data.startswith(stringEndArchive64) and reloff != offset:
+ # Since we already have seen the Zip64 EOCD Locator, it's
+ # possible we got here because there is prepended data.
+ # Assume no 'zip64 extensible data'
+ fpin.seek(offset)
+ extrasz = 0
+ data = fpin.read(sizeEndCentDir64)
+ if len(data) != sizeEndCentDir64:
+ raise OSError("Unknown I/O error")
+ if not data.startswith(stringEndArchive64):
+ raise BadZipFile("Zip64 end of central directory record not found")
+
sig, sz, create_version, read_version, disk_num, disk_dir, \
dircount, dircount2, dirsize, diroffset = \
struct.unpack(structEndArchive64, data)
- if sig != stringEndArchive64:
- return endrec
+ if (diroffset + dirsize != reloff or
+ sz + 12 != sizeEndCentDir64 + extrasz):
+ raise BadZipFile("Corrupt zip64 end of central directory record")
# Update the original endrec using data from the ZIP64 record
endrec[_ECD_SIGNATURE] = sig
@@ -322,6 +335,7 @@
endrec[_ECD_ENTRIES_TOTAL] = dircount2
endrec[_ECD_SIZE] = dirsize
endrec[_ECD_OFFSET] = diroffset
+ endrec[_ECD_LOCATION] = offset - extrasz
return endrec
@@ -355,7 +369,7 @@
endrec.append(filesize - sizeEndCentDir)
# Try to read the "Zip64 end of central directory" structure
- return _EndRecData64(fpin, -sizeEndCentDir, endrec)
+ return _EndRecData64(fpin, filesize - sizeEndCentDir, endrec)
# Either this is not a ZIP file, or it is a ZIP file with an archive
# comment. Search the end of the file for the "end of central directory"
@@ -379,8 +393,7 @@
endrec.append(maxCommentStart + start)
# Try to read the "Zip64 end of central directory" structure
- return _EndRecData64(fpin, maxCommentStart + start - filesize,
- endrec)
+ return _EndRecData64(fpin, maxCommentStart + start, endrec)
# Unable to find a valid end of central directory structure
return None
@@ -2142,7 +2155,7 @@
" would require ZIP64 extensions")
zip64endrec = struct.pack(
structEndArchive64, stringEndArchive64,
- 44, 45, 45, 0, 0, centDirCount, centDirCount,
+ sizeEndCentDir64 - 12, 45, 45, 0, 0, centDirCount,
centDirCount,
centDirSize, centDirOffset)
self.fp.write(zip64endrec)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore'
old/backports_zstd-1.0.0/src/python/backports/zstd/zipfile/__init__.pyi
new/backports_zstd-1.3.0/src/python/backports/zstd/zipfile/__init__.pyi
--- old/backports_zstd-1.0.0/src/python/backports/zstd/zipfile/__init__.pyi
2025-10-10 07:34:48.000000000 +0200
+++ new/backports_zstd-1.3.0/src/python/backports/zstd/zipfile/__init__.pyi
2025-12-29 16:59:44.000000000 +0100
@@ -269,6 +269,7 @@
def from_file(cls, filename: StrPath, arcname: StrPath | None = None, *,
strict_timestamps: bool = True) -> Self: ...
def is_dir(self) -> bool: ...
def FileHeader(self, zip64: bool | None = None) -> bytes: ...
+ def _for_archive(self, archive: ZipFile) -> Self: ...
from zipfile._path import CompleteDirs as CompleteDirs, Path as Path
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore'
old/backports_zstd-1.0.0/src/python/backports.zstd.egg-info/PKG-INFO
new/backports_zstd-1.3.0/src/python/backports.zstd.egg-info/PKG-INFO
--- old/backports_zstd-1.0.0/src/python/backports.zstd.egg-info/PKG-INFO
2025-10-10 07:34:57.000000000 +0200
+++ new/backports_zstd-1.3.0/src/python/backports.zstd.egg-info/PKG-INFO
2025-12-29 16:59:53.000000000 +0100
@@ -1,6 +1,6 @@
Metadata-Version: 2.4
Name: backports.zstd
-Version: 1.0.0
+Version: 1.3.0
Summary: Backport of compression.zstd
Author-email: Rogdham <[email protected]>
License-Expression: PSF-2.0
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore'
old/backports_zstd-1.0.0/src/python/backports.zstd.egg-info/SOURCES.txt
new/backports_zstd-1.3.0/src/python/backports.zstd.egg-info/SOURCES.txt
--- old/backports_zstd-1.0.0/src/python/backports.zstd.egg-info/SOURCES.txt
2025-10-10 07:34:57.000000000 +0200
+++ new/backports_zstd-1.3.0/src/python/backports.zstd.egg-info/SOURCES.txt
2025-12-29 16:59:53.000000000 +0100
@@ -126,7 +126,6 @@
src/c/zstd/lib/legacy/zstd_v06.h
src/c/zstd/lib/legacy/zstd_v07.c
src/c/zstd/lib/legacy/zstd_v07.h
-src/python/backports/__init__.py
src/python/backports.zstd.egg-info/PKG-INFO
src/python/backports.zstd.egg-info/SOURCES.txt
src/python/backports.zstd.egg-info/dependency_links.txt
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/backports_zstd-1.0.0/tests/test/support/__init__.py
new/backports_zstd-1.3.0/tests/test/support/__init__.py
--- old/backports_zstd-1.0.0/tests/test/support/__init__.py 2025-10-10
07:34:48.000000000 +0200
+++ new/backports_zstd-1.3.0/tests/test/support/__init__.py 2025-12-29
16:59:44.000000000 +0100
@@ -68,7 +68,7 @@
"BrokenIter",
"in_systemd_nspawn_sync_suppressed",
"run_no_yield_async_fn", "run_yielding_async_fn", "async_yield",
- "reset_code",
+ "reset_code", "on_github_actions"
]
@@ -1366,6 +1366,7 @@
f.__code__ = f.__code__.replace()
return f
+on_github_actions = "GITHUB_ACTIONS" in os.environ
#=======================================================================
# Check for the presence of docstrings.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/backports_zstd-1.0.0/tests/test/test_tarfile.py
new/backports_zstd-1.3.0/tests/test/test_tarfile.py
--- old/backports_zstd-1.0.0/tests/test/test_tarfile.py 2025-10-10
07:34:48.000000000 +0200
+++ new/backports_zstd-1.3.0/tests/test/test_tarfile.py 2025-12-29
16:59:44.000000000 +0100
@@ -854,6 +854,57 @@
with tarfile.open(fileobj=fd, mode="r") as tf:
self.assertEqual(tf.next(), None)
+ def _setup_symlink_to_target(self, temp_dirpath):
+ target_filepath = os.path.join(temp_dirpath, "target")
+ ustar_dirpath = os.path.join(temp_dirpath, "ustar")
+ hardlink_filepath = os.path.join(ustar_dirpath, "lnktype")
+ with open(target_filepath, "wb") as f:
+ f.write(b"target")
+ os.makedirs(ustar_dirpath)
+ os.symlink(target_filepath, hardlink_filepath)
+ return target_filepath, hardlink_filepath
+
+ def _assert_on_file_content(self, filepath, digest):
+ with open(filepath, "rb") as f:
+ data = f.read()
+ self.assertEqual(sha256sum(data), digest)
+
+ @unittest.skipUnless(
+ hasattr(os, "link"), "Missing hardlink implementation"
+ )
+ @os_helper.skip_unless_symlink
+ def test_extract_hardlink_on_symlink(self):
+ """
+ This test verifies that extracting a hardlink will not follow an
+ existing symlink after a FileExistsError on os.link.
+ """
+ with os_helper.temp_dir() as DIR:
+ target_filepath, hardlink_filepath =
self._setup_symlink_to_target(DIR)
+ with tarfile.open(tarname, encoding="iso8859-1") as tar:
+ tar.extract("ustar/regtype", DIR, filter="data")
+ tar.extract("ustar/lnktype", DIR, filter="data")
+ self._assert_on_file_content(target_filepath,
sha256sum(b"target"))
+ self._assert_on_file_content(hardlink_filepath, sha256_regtype)
+
+ @unittest.skipUnless(
+ hasattr(os, "link"), "Missing hardlink implementation"
+ )
+ @os_helper.skip_unless_symlink
+ def test_extractall_hardlink_on_symlink(self):
+ """
+ This test verifies that extracting a hardlink will not follow an
+ existing symlink after a FileExistsError on os.link.
+ """
+ with os_helper.temp_dir() as DIR:
+ target_filepath, hardlink_filepath =
self._setup_symlink_to_target(DIR)
+ with tarfile.open(tarname, encoding="iso8859-1") as tar:
+ tar.extractall(
+ DIR, members=["ustar/regtype", "ustar/lnktype"],
filter="data",
+ )
+ self._assert_on_file_content(target_filepath,
sha256sum(b"target"))
+ self._assert_on_file_content(hardlink_filepath, sha256_regtype)
+
+
class MiscReadTest(MiscReadTestBase, unittest.TestCase):
test_fail_comp = None
@@ -1750,6 +1801,16 @@
finally:
os.umask(original_umask)
+ def test_pathlike_name(self):
+ expected_name = os.path.abspath(tmpname)
+ tarpath = os_helper.FakePath(tmpname)
+
+ for func in (tarfile.open, tarfile.TarFile.open):
+ with self.subTest():
+ with func(tarpath, self.mode) as tar:
+ self.assertEqual(tar.name, expected_name)
+ os_helper.unlink(tmpname)
+
class GzipStreamWriteTest(GzipTest, StreamWriteTest):
def test_source_directory_not_leaked(self):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore'
old/backports_zstd-1.0.0/tests/test/test_zipfile/_path/test_path.py
new/backports_zstd-1.3.0/tests/test/test_zipfile/_path/test_path.py
--- old/backports_zstd-1.0.0/tests/test/test_zipfile/_path/test_path.py
2025-10-10 07:34:48.000000000 +0200
+++ new/backports_zstd-1.3.0/tests/test/test_zipfile/_path/test_path.py
2025-12-29 16:59:44.000000000 +0100
@@ -274,7 +274,7 @@
zipfile_ondisk = self.zipfile_ondisk(alpharep)
pathlike = FakePath(str(zipfile_ondisk))
root = zipfile.Path(pathlike)
- root.root.close() # gh-137589
+ root.root.close()
@pass_alpharep
def test_traverse_pathlike(self, alpharep):
@@ -373,7 +373,7 @@
root = zipfile.Path(self.zipfile_ondisk(alpharep))
assert root.name == 'alpharep.zip' == root.filename.name
assert root.stem == 'alpharep' == root.filename.stem
- root.root.close() # gh-137589
+ root.root.close()
@pass_alpharep
def test_suffix(self, alpharep):
@@ -577,11 +577,11 @@
zipfile_ondisk = path_type(str(self.zipfile_ondisk(alpharep)))
root = zipfile.Path(zipfile_ondisk, at=subpath)
saved_1 = pickle.dumps(root)
- root.root.close() # gh-137589
+ root.root.close()
restored_1 = pickle.loads(saved_1)
first, *rest = restored_1.iterdir()
assert first.read_text(encoding='utf-8').startswith('content of ')
- restored_1.root.close() # gh-137589
+ restored_1.root.close()
@pass_alpharep
def test_extract_orig_with_implied_dirs(self, alpharep):
@@ -593,7 +593,7 @@
# wrap the zipfile for its side effect
zipfile.Path(zf)
zf.extractall(source_path.parent)
- zf.close() # gh-137589
+ zf.close()
@pass_alpharep
def test_getinfo_missing(self, alpharep):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore'
old/backports_zstd-1.0.0/tests/test/test_zipfile/test_core.py
new/backports_zstd-1.3.0/tests/test/test_zipfile/test_core.py
--- old/backports_zstd-1.0.0/tests/test/test_zipfile/test_core.py
2025-10-10 07:34:48.000000000 +0200
+++ new/backports_zstd-1.3.0/tests/test/test_zipfile/test_core.py
2025-12-29 16:59:44.000000000 +0100
@@ -312,29 +312,26 @@
self.assertEqual(openobj.read(1), b'2')
def test_writestr_compression(self):
- zipfp = zipfile.ZipFile(TESTFN2, "w")
- zipfp.writestr("b.txt", "hello world", compress_type=self.compression)
- info = zipfp.getinfo('b.txt')
- self.assertEqual(info.compress_type, self.compression)
- zipfp.close() # gh-137589
+ with zipfile.ZipFile(TESTFN2, "w") as zipfp:
+ zipfp.writestr("b.txt", "hello world",
compress_type=self.compression)
+ info = zipfp.getinfo('b.txt')
+ self.assertEqual(info.compress_type, self.compression)
def test_writestr_compresslevel(self):
- zipfp = zipfile.ZipFile(TESTFN2, "w", compresslevel=1)
- zipfp.writestr("a.txt", "hello world", compress_type=self.compression)
- zipfp.writestr("b.txt", "hello world", compress_type=self.compression,
- compresslevel=2)
-
- # Compression level follows the constructor.
- a_info = zipfp.getinfo('a.txt')
- self.assertEqual(a_info.compress_type, self.compression)
- self.assertEqual(a_info.compress_level, 1)
-
- # Compression level is overridden.
- b_info = zipfp.getinfo('b.txt')
- self.assertEqual(b_info.compress_type, self.compression)
- self.assertEqual(b_info._compresslevel, 2)
-
- zipfp.close() # gh-137589
+ with zipfile.ZipFile(TESTFN2, "w", compresslevel=1) as zipfp:
+ zipfp.writestr("a.txt", "hello world",
compress_type=self.compression)
+ zipfp.writestr("b.txt", "hello world",
compress_type=self.compression,
+ compresslevel=2)
+
+ # Compression level follows the constructor.
+ a_info = zipfp.getinfo('a.txt')
+ self.assertEqual(a_info.compress_type, self.compression)
+ self.assertEqual(a_info.compress_level, 1)
+
+ # Compression level is overridden.
+ b_info = zipfp.getinfo('b.txt')
+ self.assertEqual(b_info.compress_type, self.compression)
+ self.assertEqual(b_info._compresslevel, 2)
def test_read_return_size(self):
# Issue #9837: ZipExtFile.read() shouldn't return more bytes
@@ -901,6 +898,8 @@
self, file_size_64_set=False, file_size_extra=False,
compress_size_64_set=False, compress_size_extra=False,
header_offset_64_set=False, header_offset_extra=False,
+ extensible_data=b'',
+ end_of_central_dir_size=None, offset_to_end_of_central_dir=None,
):
"""Generate bytes sequence for a zip with (incomplete) zip64 data.
@@ -954,6 +953,12 @@
central_dir_size = struct.pack('<Q', 58 + 8 *
len(central_zip64_fields))
offset_to_central_dir = struct.pack('<Q', 50 + 8 *
len(local_zip64_fields))
+ if end_of_central_dir_size is None:
+ end_of_central_dir_size = 44 + len(extensible_data)
+ if offset_to_end_of_central_dir is None:
+ offset_to_end_of_central_dir = (108
+ + 8 * len(local_zip64_fields)
+ + 8 * len(central_zip64_fields))
local_extra_length = struct.pack("<H", 4 + 8 * len(local_zip64_fields))
central_extra_length = struct.pack("<H", 4 + 8 *
len(central_zip64_fields))
@@ -982,14 +987,17 @@
+ filename
+ central_extra
# Zip64 end of central directory
- + b"PK\x06\x06,\x00\x00\x00\x00\x00\x00\x00-\x00-"
- + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00"
+ + b"PK\x06\x06"
+ + struct.pack('<Q', end_of_central_dir_size)
+ +
b"-\x00-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00"
+ b"\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00"
+ central_dir_size
+ offset_to_central_dir
+ + extensible_data
# Zip64 end of central directory locator
- + b"PK\x06\x07\x00\x00\x00\x00l\x00\x00\x00\x00\x00\x00\x00\x01"
- + b"\x00\x00\x00"
+ + b"PK\x06\x07\x00\x00\x00\x00"
+ + struct.pack('<Q', offset_to_end_of_central_dir)
+ + b"\x01\x00\x00\x00"
# end of central directory
+ b"PK\x05\x06\x00\x00\x00\x00\x01\x00\x01\x00:\x00\x00\x002\x00"
+ b"\x00\x00\x00\x00"
@@ -1020,6 +1028,7 @@
with self.assertRaises(zipfile.BadZipFile) as e:
zipfile.ZipFile(io.BytesIO(missing_file_size_extra))
self.assertIn('file size', str(e.exception).lower())
+
self.assertTrue(zipfile.is_zipfile(io.BytesIO(missing_file_size_extra)))
# zip64 file size present, zip64 compress size present, one field in
# extra, expecting two, equals missing compress size.
@@ -1031,6 +1040,7 @@
with self.assertRaises(zipfile.BadZipFile) as e:
zipfile.ZipFile(io.BytesIO(missing_compress_size_extra))
self.assertIn('compress size', str(e.exception).lower())
+
self.assertTrue(zipfile.is_zipfile(io.BytesIO(missing_compress_size_extra)))
# zip64 compress size present, no fields in extra, expecting one,
# equals missing compress size.
@@ -1040,6 +1050,7 @@
with self.assertRaises(zipfile.BadZipFile) as e:
zipfile.ZipFile(io.BytesIO(missing_compress_size_extra))
self.assertIn('compress size', str(e.exception).lower())
+
self.assertTrue(zipfile.is_zipfile(io.BytesIO(missing_compress_size_extra)))
# zip64 file size present, zip64 compress size present, zip64 header
# offset present, two fields in extra, expecting three, equals missing
@@ -1054,6 +1065,7 @@
with self.assertRaises(zipfile.BadZipFile) as e:
zipfile.ZipFile(io.BytesIO(missing_header_offset_extra))
self.assertIn('header offset', str(e.exception).lower())
+
self.assertTrue(zipfile.is_zipfile(io.BytesIO(missing_header_offset_extra)))
# zip64 compress size present, zip64 header offset present, one field
# in extra, expecting two, equals missing header offset
@@ -1066,6 +1078,7 @@
with self.assertRaises(zipfile.BadZipFile) as e:
zipfile.ZipFile(io.BytesIO(missing_header_offset_extra))
self.assertIn('header offset', str(e.exception).lower())
+
self.assertTrue(zipfile.is_zipfile(io.BytesIO(missing_header_offset_extra)))
# zip64 file size present, zip64 header offset present, one field in
# extra, expecting two, equals missing header offset
@@ -1078,6 +1091,7 @@
with self.assertRaises(zipfile.BadZipFile) as e:
zipfile.ZipFile(io.BytesIO(missing_header_offset_extra))
self.assertIn('header offset', str(e.exception).lower())
+
self.assertTrue(zipfile.is_zipfile(io.BytesIO(missing_header_offset_extra)))
# zip64 header offset present, no fields in extra, expecting one,
# equals missing header offset
@@ -1089,6 +1103,63 @@
with self.assertRaises(zipfile.BadZipFile) as e:
zipfile.ZipFile(io.BytesIO(missing_header_offset_extra))
self.assertIn('header offset', str(e.exception).lower())
+
self.assertTrue(zipfile.is_zipfile(io.BytesIO(missing_header_offset_extra)))
+
+ def test_bad_zip64_end_of_central_dir(self):
+ zipdata = self.make_zip64_file(end_of_central_dir_size=0)
+ with self.assertRaisesRegex(zipfile.BadZipFile, 'Corrupt.*record'):
+ zipfile.ZipFile(io.BytesIO(zipdata))
+ self.assertFalse(zipfile.is_zipfile(io.BytesIO(zipdata)))
+
+ zipdata = self.make_zip64_file(end_of_central_dir_size=100)
+ with self.assertRaisesRegex(zipfile.BadZipFile, 'Corrupt.*record'):
+ zipfile.ZipFile(io.BytesIO(zipdata))
+ self.assertFalse(zipfile.is_zipfile(io.BytesIO(zipdata)))
+
+ zipdata = self.make_zip64_file(offset_to_end_of_central_dir=0)
+ with self.assertRaisesRegex(zipfile.BadZipFile, 'Corrupt.*record'):
+ zipfile.ZipFile(io.BytesIO(zipdata))
+ self.assertFalse(zipfile.is_zipfile(io.BytesIO(zipdata)))
+
+ zipdata = self.make_zip64_file(offset_to_end_of_central_dir=1000)
+ with self.assertRaisesRegex(zipfile.BadZipFile, 'Corrupt.*locator'):
+ zipfile.ZipFile(io.BytesIO(zipdata))
+ self.assertFalse(zipfile.is_zipfile(io.BytesIO(zipdata)))
+
+ def test_zip64_end_of_central_dir_record_not_found(self):
+ zipdata = self.make_zip64_file()
+ zipdata = zipdata.replace(b"PK\x06\x06", b'\x00'*4)
+ with self.assertRaisesRegex(zipfile.BadZipFile, 'record not found'):
+ zipfile.ZipFile(io.BytesIO(zipdata))
+ self.assertFalse(zipfile.is_zipfile(io.BytesIO(zipdata)))
+
+ zipdata = self.make_zip64_file(
+ extensible_data=b'\xca\xfe\x04\x00\x00\x00data')
+ zipdata = zipdata.replace(b"PK\x06\x06", b'\x00'*4)
+ with self.assertRaisesRegex(zipfile.BadZipFile, 'record not found'):
+ zipfile.ZipFile(io.BytesIO(zipdata))
+ self.assertFalse(zipfile.is_zipfile(io.BytesIO(zipdata)))
+
+ def test_zip64_extensible_data(self):
+ # These values are what is set in the make_zip64_file method.
+ expected_file_size = 8
+ expected_compress_size = 8
+ expected_header_offset = 0
+ expected_content = b"test1234"
+
+ zipdata = self.make_zip64_file(
+ extensible_data=b'\xca\xfe\x04\x00\x00\x00data')
+ with zipfile.ZipFile(io.BytesIO(zipdata)) as zf:
+ zinfo = zf.infolist()[0]
+ self.assertEqual(zinfo.file_size, expected_file_size)
+ self.assertEqual(zinfo.compress_size, expected_compress_size)
+ self.assertEqual(zinfo.header_offset, expected_header_offset)
+ self.assertEqual(zf.read(zinfo), expected_content)
+ self.assertTrue(zipfile.is_zipfile(io.BytesIO(zipdata)))
+
+ with self.assertRaisesRegex(zipfile.BadZipFile, 'record not found'):
+ zipfile.ZipFile(io.BytesIO(b'prepended' + zipdata))
+ self.assertFalse(zipfile.is_zipfile(io.BytesIO(b'prepended' +
zipdata)))
def test_generated_valid_zip64_extra(self):
# These values are what is set in the make_zip64_file method.
@@ -2271,7 +2342,7 @@
zipf = zipfile.ZipFile(TESTFN, mode="r")
except zipfile.BadZipFile:
self.fail("Unable to create empty ZIP file in 'w' mode")
- zipf.close() # gh-137589
+ zipf.close()
zipf = zipfile.ZipFile(TESTFN, mode="a")
zipf.close()
@@ -2279,7 +2350,7 @@
zipf = zipfile.ZipFile(TESTFN, mode="r")
except:
self.fail("Unable to create empty ZIP file in 'a' mode")
- zipf.close() # gh-137589
+ zipf.close()
def test_open_empty_file(self):
# Issue 1710703: Check that opening a file with less than 22 bytes
@@ -2472,6 +2543,10 @@
@requires_zlib()
def test_full_overlap_different_names(self):
+ # The ZIP file contains two central directory entries with
+ # different names which refer to the same local header.
+ # The name of the local header matches the name of the first
+ # central directory entry.
data = (
b'PK\x03\x04\x14\x00\x00\x00\x08\x00\xa0lH\x05\xe2\x1e'
b'8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00\x00\x00b\xed'
@@ -2501,6 +2576,10 @@
@requires_zlib()
def test_full_overlap_different_names2(self):
+ # The ZIP file contains two central directory entries with
+ # different names which refer to the same local header.
+ # The name of the local header matches the name of the second
+ # central directory entry.
data = (
b'PK\x03\x04\x14\x00\x00\x00\x08\x00\xa0lH\x05\xe2\x1e'
b'8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00\x00\x00a\xed'
@@ -2533,6 +2612,8 @@
@requires_zlib()
def test_full_overlap_same_name(self):
+ # The ZIP file contains two central directory entries with
+ # the same name which refer to the same local header.
data = (
b'PK\x03\x04\x14\x00\x00\x00\x08\x00\xa0lH\x05\xe2\x1e'
b'8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00\x00\x00a\xed'
@@ -2567,6 +2648,8 @@
@requires_zlib()
def test_quoted_overlap(self):
+ # The ZIP file contains two files. The second local header
+ # is contained in the range of the first file.
data = (
b'PK\x03\x04\x14\x00\x00\x00\x08\x00\xa0lH\x05Y\xfc'
b'8\x044\x00\x00\x00(\x04\x00\x00\x01\x00\x00\x00a\x00'
@@ -2598,6 +2681,7 @@
@requires_zlib()
def test_overlap_with_central_dir(self):
+ # The local header offset is equal to the central directory offset.
data = (
b'PK\x01\x02\x14\x03\x14\x00\x00\x00\x08\x00G_|Z'
b'\xe2\x1e8\xbb\x0b\x00\x00\x00\t\x04\x00\x00\x01\x00\x00\x00'
@@ -2612,11 +2696,15 @@
self.assertEqual(zi.header_offset, 0)
self.assertEqual(zi.compress_size, 11)
self.assertEqual(zi.file_size, 1033)
+ # Found central directory signature PK\x01\x02 instead of
+ # local header signature PK\x03\x04.
with self.assertRaisesRegex(zipfile.BadZipFile, 'Bad magic
number'):
zipf.read('a')
@requires_zlib()
def test_overlap_with_archive_comment(self):
+ # The local header is written after the central directory,
+ # in the archive comment.
data = (
b'PK\x01\x02\x14\x03\x14\x00\x00\x00\x08\x00G_|Z'
b'\xe2\x1e8\xbb\x0b\x00\x00\x00\t\x04\x00\x00\x01\x00\x00\x00'