pkgsrc-Changes archive
CVS commit: pkgsrc/lang/python313
Module Name: pkgsrc
Committed By: wiz
Date: Sat Oct 11 19:07:06 UTC 2025
Modified Files:
pkgsrc/lang/python313: Makefile distinfo
Added Files:
pkgsrc/lang/python313/patches:
patch-Lib_test_test__zipfile_test__core.py
patch-Lib_zipfile_____init____.py
Log Message:
python313: fix zip vulnerability
Bump PKGREVISION.
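
For context, the upstream change pulled in here (cpython commit 333d4a6f, linked in the patches below) makes zipfile reject archives whose zip64 end-of-central-directory record or locator is inconsistent: ZipFile() raises BadZipFile, and is_zipfile() now returns False for them instead of letting the error propagate. A minimal sketch of how a consumer of the patched module might guard against untrusted archives; the safe_open_zip helper is illustrative only and not part of this commit:

import io
import zipfile

def safe_open_zip(data: bytes):
    # Return a ZipFile for data, or None if the archive is rejected.
    buf = io.BytesIO(data)
    # With the patched module, is_zipfile() returns False for archives
    # carrying a corrupt zip64 end-of-central-directory record/locator.
    if not zipfile.is_zipfile(buf):
        return None
    buf.seek(0)
    try:
        return zipfile.ZipFile(buf)
    except zipfile.BadZipFile:
        # Covers any remaining parse errors raised on open.
        return None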
To generate a diff of this commit:
cvs rdiff -u -r1.24 -r1.25 pkgsrc/lang/python313/Makefile
cvs rdiff -u -r1.13 -r1.14 pkgsrc/lang/python313/distinfo
cvs rdiff -u -r0 -r1.1 \
pkgsrc/lang/python313/patches/patch-Lib_test_test__zipfile_test__core.py \
pkgsrc/lang/python313/patches/patch-Lib_zipfile_____init____.py
Please note that diffs are not public domain; they are subject to the
copyright notices on the relevant files.
Modified files:
Index: pkgsrc/lang/python313/Makefile
diff -u pkgsrc/lang/python313/Makefile:1.24 pkgsrc/lang/python313/Makefile:1.25
--- pkgsrc/lang/python313/Makefile:1.24 Fri Aug 15 06:44:41 2025
+++ pkgsrc/lang/python313/Makefile Sat Oct 11 19:07:06 2025
@@ -1,8 +1,9 @@
-# $NetBSD: Makefile,v 1.24 2025/08/15 06:44:41 adam Exp $
+# $NetBSD: Makefile,v 1.25 2025/10/11 19:07:06 wiz Exp $
.include "dist.mk"
PKGNAME= python313-${PY_DISTVERSION}
+PKGREVISION= 1
CATEGORIES= lang python
MAINTAINER=	pkgsrc-users@NetBSD.org
@@ -53,10 +54,10 @@ PTHREAD_OPTS+= require
.include "../../mk/bsd.prefs.mk"
# NetBSD-8 curses has enough support for py-curses
-USE_CURSES= getsyx update_panels wide
+USE_CURSES= getsyx update_panels wide
# But we build as ncurses still to get the full feature set easily
.if ${OPSYS} == "NetBSD"
-FAKE_NCURSES= yes
+FAKE_NCURSES= yes
## Force use of libuuid
CONFIGURE_ARGS+= ac_cv_header_uuid_h=false
.endif
@@ -208,6 +209,8 @@ setuptools-preinstall:
# for testing
ALLOW_NETWORK_ACCESS= yes
+# as of 3.13.8
+# Total test files: run=498/480 failed=7 env_changed=2 skipped=30 resource_denied=2 rerun=20
.if ${OPSYS} == "Linux"
.include "../../databases/gdbm_compat/buildlink3.mk"
Index: pkgsrc/lang/python313/distinfo
diff -u pkgsrc/lang/python313/distinfo:1.13 pkgsrc/lang/python313/distinfo:1.14
--- pkgsrc/lang/python313/distinfo:1.13 Wed Oct 8 05:55:24 2025
+++ pkgsrc/lang/python313/distinfo Sat Oct 11 19:07:06 2025
@@ -1,4 +1,4 @@
-$NetBSD: distinfo,v 1.13 2025/10/08 05:55:24 adam Exp $
+$NetBSD: distinfo,v 1.14 2025/10/11 19:07:06 wiz Exp $
BLAKE2s (Python-3.13.8.tar.xz) = 331d01f1f6c95932d77315dd0a906967bf8ecb764ef08d034c164f0ba364b67f
SHA512 (Python-3.13.8.tar.xz) = 8569959f24083824f6644b839c6ebd587e67fb74b3c5fed9ef44bfbbba46076e98db33a27218b2d44edd15b3b05ae85e94b71491598cbb871d7d43a9d6fa2b84
@@ -6,6 +6,8 @@ Size (Python-3.13.8.tar.xz) = 22681576 b
SHA1 (patch-Include_pymacro.h) = 7611315fefc305a48b4965f2f2b9bee53ae3d987
SHA1 (patch-Lib_ctypes_util.py) = 3dec1b6b7a36e46cbfa0dfcd71c5e7fac9f60764
SHA1 (patch-Lib_sysconfig_____init____.py) = 6c151d3dca0367cbb38c1175b9dba894509cf1a4
+SHA1 (patch-Lib_test_test__zipfile_test__core.py) = 94b9820125399e5b69ba7858fdbd5fcd980071b6
+SHA1 (patch-Lib_zipfile_____init____.py) = a3a740a65cbc9b82a5484912a3e8000b8f7beec8
SHA1 (patch-Makefile.pre.in) = 127d1af7947ea512da3a973eb2ed2d89990893bd
SHA1 (patch-Modules_faulthandler.c) = ca59c378d25bfc0769a7f5da887369d8c913e70c
SHA1 (patch-Modules_readline.c) = 232f6ac43b5a0e87c915f13117bae91ef069a6c1
Added files:
Index: pkgsrc/lang/python313/patches/patch-Lib_test_test__zipfile_test__core.py
diff -u /dev/null pkgsrc/lang/python313/patches/patch-Lib_test_test__zipfile_test__core.py:1.1
--- /dev/null Sat Oct 11 19:07:06 2025
+++ pkgsrc/lang/python313/patches/patch-Lib_test_test__zipfile_test__core.py Sat Oct 11 19:07:06 2025
@@ -0,0 +1,162 @@
+$NetBSD: patch-Lib_test_test__zipfile_test__core.py,v 1.1 2025/10/11 19:07:06 wiz Exp $
+
+https://github.com/python/cpython/commit/333d4a6f4967d3ace91492a39ededbcf3faa76a6
+
+--- Lib/test/test_zipfile/test_core.py.orig 2025-10-07 12:01:51.000000000 +0000
++++ Lib/test/test_zipfile/test_core.py
+@@ -884,6 +884,8 @@ class StoredTestZip64InSmallFiles(Abstra
+ self, file_size_64_set=False, file_size_extra=False,
+ compress_size_64_set=False, compress_size_extra=False,
+ header_offset_64_set=False, header_offset_extra=False,
++ extensible_data=b'',
++ end_of_central_dir_size=None, offset_to_end_of_central_dir=None,
+ ):
+ """Generate bytes sequence for a zip with (incomplete) zip64 data.
+
+@@ -937,6 +939,12 @@ class StoredTestZip64InSmallFiles(Abstra
+
+ central_dir_size = struct.pack('<Q', 58 + 8 * len(central_zip64_fields))
+ offset_to_central_dir = struct.pack('<Q', 50 + 8 * len(local_zip64_fields))
++ if end_of_central_dir_size is None:
++ end_of_central_dir_size = 44 + len(extensible_data)
++ if offset_to_end_of_central_dir is None:
++ offset_to_end_of_central_dir = (108
++ + 8 * len(local_zip64_fields)
++ + 8 * len(central_zip64_fields))
+
+ local_extra_length = struct.pack("<H", 4 + 8 * len(local_zip64_fields))
+ central_extra_length = struct.pack("<H", 4 + 8 * len(central_zip64_fields))
+@@ -965,14 +973,17 @@ class StoredTestZip64InSmallFiles(Abstra
+ + filename
+ + central_extra
+ # Zip64 end of central directory
+- + b"PK\x06\x06,\x00\x00\x00\x00\x00\x00\x00-\x00-"
+- + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00"
++ + b"PK\x06\x06"
++ + struct.pack('<Q', end_of_central_dir_size)
++ + b"-\x00-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00"
+ + b"\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00"
+ + central_dir_size
+ + offset_to_central_dir
++ + extensible_data
+ # Zip64 end of central directory locator
+- + b"PK\x06\x07\x00\x00\x00\x00l\x00\x00\x00\x00\x00\x00\x00\x01"
+- + b"\x00\x00\x00"
++ + b"PK\x06\x07\x00\x00\x00\x00"
++ + struct.pack('<Q', offset_to_end_of_central_dir)
++ + b"\x01\x00\x00\x00"
+ # end of central directory
+ + b"PK\x05\x06\x00\x00\x00\x00\x01\x00\x01\x00:\x00\x00\x002\x00"
+ + b"\x00\x00\x00\x00"
+@@ -1003,6 +1014,7 @@ class StoredTestZip64InSmallFiles(Abstra
+ with self.assertRaises(zipfile.BadZipFile) as e:
+ zipfile.ZipFile(io.BytesIO(missing_file_size_extra))
+ self.assertIn('file size', str(e.exception).lower())
++ self.assertTrue(zipfile.is_zipfile(io.BytesIO(missing_file_size_extra)))
+
+ # zip64 file size present, zip64 compress size present, one field in
+ # extra, expecting two, equals missing compress size.
+@@ -1014,6 +1026,7 @@ class StoredTestZip64InSmallFiles(Abstra
+ with self.assertRaises(zipfile.BadZipFile) as e:
+ zipfile.ZipFile(io.BytesIO(missing_compress_size_extra))
+ self.assertIn('compress size', str(e.exception).lower())
++ self.assertTrue(zipfile.is_zipfile(io.BytesIO(missing_compress_size_extra)))
+
+ # zip64 compress size present, no fields in extra, expecting one,
+ # equals missing compress size.
+@@ -1023,6 +1036,7 @@ class StoredTestZip64InSmallFiles(Abstra
+ with self.assertRaises(zipfile.BadZipFile) as e:
+ zipfile.ZipFile(io.BytesIO(missing_compress_size_extra))
+ self.assertIn('compress size', str(e.exception).lower())
++ self.assertTrue(zipfile.is_zipfile(io.BytesIO(missing_compress_size_extra)))
+
+ # zip64 file size present, zip64 compress size present, zip64 header
+ # offset present, two fields in extra, expecting three, equals missing
+@@ -1037,6 +1051,7 @@ class StoredTestZip64InSmallFiles(Abstra
+ with self.assertRaises(zipfile.BadZipFile) as e:
+ zipfile.ZipFile(io.BytesIO(missing_header_offset_extra))
+ self.assertIn('header offset', str(e.exception).lower())
++ self.assertTrue(zipfile.is_zipfile(io.BytesIO(missing_header_offset_extra)))
+
+ # zip64 compress size present, zip64 header offset present, one field
+ # in extra, expecting two, equals missing header offset
+@@ -1049,6 +1064,7 @@ class StoredTestZip64InSmallFiles(Abstra
+ with self.assertRaises(zipfile.BadZipFile) as e:
+ zipfile.ZipFile(io.BytesIO(missing_header_offset_extra))
+ self.assertIn('header offset', str(e.exception).lower())
++ self.assertTrue(zipfile.is_zipfile(io.BytesIO(missing_header_offset_extra)))
+
+ # zip64 file size present, zip64 header offset present, one field in
+ # extra, expecting two, equals missing header offset
+@@ -1061,6 +1077,7 @@ class StoredTestZip64InSmallFiles(Abstra
+ with self.assertRaises(zipfile.BadZipFile) as e:
+ zipfile.ZipFile(io.BytesIO(missing_header_offset_extra))
+ self.assertIn('header offset', str(e.exception).lower())
++ self.assertTrue(zipfile.is_zipfile(io.BytesIO(missing_header_offset_extra)))
+
+ # zip64 header offset present, no fields in extra, expecting one,
+ # equals missing header offset
+@@ -1072,6 +1089,63 @@ class StoredTestZip64InSmallFiles(Abstra
+ with self.assertRaises(zipfile.BadZipFile) as e:
+ zipfile.ZipFile(io.BytesIO(missing_header_offset_extra))
+ self.assertIn('header offset', str(e.exception).lower())
++ self.assertTrue(zipfile.is_zipfile(io.BytesIO(missing_header_offset_extra)))
++
++ def test_bad_zip64_end_of_central_dir(self):
++ zipdata = self.make_zip64_file(end_of_central_dir_size=0)
++ with self.assertRaisesRegex(zipfile.BadZipFile, 'Corrupt.*record'):
++ zipfile.ZipFile(io.BytesIO(zipdata))
++ self.assertFalse(zipfile.is_zipfile(io.BytesIO(zipdata)))
++
++ zipdata = self.make_zip64_file(end_of_central_dir_size=100)
++ with self.assertRaisesRegex(zipfile.BadZipFile, 'Corrupt.*record'):
++ zipfile.ZipFile(io.BytesIO(zipdata))
++ self.assertFalse(zipfile.is_zipfile(io.BytesIO(zipdata)))
++
++ zipdata = self.make_zip64_file(offset_to_end_of_central_dir=0)
++ with self.assertRaisesRegex(zipfile.BadZipFile, 'Corrupt.*record'):
++ zipfile.ZipFile(io.BytesIO(zipdata))
++ self.assertFalse(zipfile.is_zipfile(io.BytesIO(zipdata)))
++
++ zipdata = self.make_zip64_file(offset_to_end_of_central_dir=1000)
++ with self.assertRaisesRegex(zipfile.BadZipFile, 'Corrupt.*locator'):
++ zipfile.ZipFile(io.BytesIO(zipdata))
++ self.assertFalse(zipfile.is_zipfile(io.BytesIO(zipdata)))
++
++ def test_zip64_end_of_central_dir_record_not_found(self):
++ zipdata = self.make_zip64_file()
++ zipdata = zipdata.replace(b"PK\x06\x06", b'\x00'*4)
++ with self.assertRaisesRegex(zipfile.BadZipFile, 'record not found'):
++ zipfile.ZipFile(io.BytesIO(zipdata))
++ self.assertFalse(zipfile.is_zipfile(io.BytesIO(zipdata)))
++
++ zipdata = self.make_zip64_file(
++ extensible_data=b'\xca\xfe\x04\x00\x00\x00data')
++ zipdata = zipdata.replace(b"PK\x06\x06", b'\x00'*4)
++ with self.assertRaisesRegex(zipfile.BadZipFile, 'record not found'):
++ zipfile.ZipFile(io.BytesIO(zipdata))
++ self.assertFalse(zipfile.is_zipfile(io.BytesIO(zipdata)))
++
++ def test_zip64_extensible_data(self):
++ # These values are what is set in the make_zip64_file method.
++ expected_file_size = 8
++ expected_compress_size = 8
++ expected_header_offset = 0
++ expected_content = b"test1234"
++
++ zipdata = self.make_zip64_file(
++ extensible_data=b'\xca\xfe\x04\x00\x00\x00data')
++ with zipfile.ZipFile(io.BytesIO(zipdata)) as zf:
++ zinfo = zf.infolist()[0]
++ self.assertEqual(zinfo.file_size, expected_file_size)
++ self.assertEqual(zinfo.compress_size, expected_compress_size)
++ self.assertEqual(zinfo.header_offset, expected_header_offset)
++ self.assertEqual(zf.read(zinfo), expected_content)
++ self.assertTrue(zipfile.is_zipfile(io.BytesIO(zipdata)))
++
++ with self.assertRaisesRegex(zipfile.BadZipFile, 'record not found'):
++ zipfile.ZipFile(io.BytesIO(b'prepended' + zipdata))
++ self.assertFalse(zipfile.is_zipfile(io.BytesIO(b'prepended' + zipdata)))
+
+ def test_generated_valid_zip64_extra(self):
+ # These values are what is set in the make_zip64_file method.
Index: pkgsrc/lang/python313/patches/patch-Lib_zipfile_____init____.py
diff -u /dev/null pkgsrc/lang/python313/patches/patch-Lib_zipfile_____init____.py:1.1
--- /dev/null Sat Oct 11 19:07:06 2025
+++ pkgsrc/lang/python313/patches/patch-Lib_zipfile_____init____.py Sat Oct 11 19:07:06 2025
@@ -0,0 +1,122 @@
+$NetBSD: patch-Lib_zipfile_____init____.py,v 1.1 2025/10/11 19:07:06 wiz Exp $
+
+https://github.com/python/cpython/commit/333d4a6f4967d3ace91492a39ededbcf3faa76a6
+
+--- Lib/zipfile/__init__.py.orig 2025-10-07 12:01:51.000000000 +0000
++++ Lib/zipfile/__init__.py
+@@ -245,7 +245,7 @@ def is_zipfile(filename):
+ else:
+ with open(filename, "rb") as fp:
+ result = _check_zipfile(fp)
+- except OSError:
++ except (OSError, BadZipFile):
+ pass
+ return result
+
+@@ -253,16 +253,15 @@ def _EndRecData64(fpin, offset, endrec):
+ """
+ Read the ZIP64 end-of-archive records and use that to update endrec
+ """
+- try:
+- fpin.seek(offset - sizeEndCentDir64Locator, 2)
+- except OSError:
+- # If the seek fails, the file is not large enough to contain a ZIP64
++ offset -= sizeEndCentDir64Locator
++ if offset < 0:
++ # The file is not large enough to contain a ZIP64
+ # end-of-archive record, so just return the end record we were given.
+ return endrec
+-
++ fpin.seek(offset)
+ data = fpin.read(sizeEndCentDir64Locator)
+ if len(data) != sizeEndCentDir64Locator:
+- return endrec
++ raise OSError("Unknown I/O error")
+ sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data)
+ if sig != stringEndArchive64Locator:
+ return endrec
+@@ -270,16 +269,33 @@ def _EndRecData64(fpin, offset, endrec):
+ if diskno != 0 or disks > 1:
+ raise BadZipFile("zipfiles that span multiple disks are not supported")
+
+- # Assume no 'zip64 extensible data'
+- fpin.seek(offset - sizeEndCentDir64Locator - sizeEndCentDir64, 2)
++ offset -= sizeEndCentDir64
++ if reloff > offset:
++ raise BadZipFile("Corrupt zip64 end of central directory locator")
++ # First, check the assumption that there is no prepended data.
++ fpin.seek(reloff)
++ extrasz = offset - reloff
+ data = fpin.read(sizeEndCentDir64)
+ if len(data) != sizeEndCentDir64:
+- return endrec
++ raise OSError("Unknown I/O error")
++ if not data.startswith(stringEndArchive64) and reloff != offset:
++ # Since we already have seen the Zip64 EOCD Locator, it's
++ # possible we got here because there is prepended data.
++ # Assume no 'zip64 extensible data'
++ fpin.seek(offset)
++ extrasz = 0
++ data = fpin.read(sizeEndCentDir64)
++ if len(data) != sizeEndCentDir64:
++ raise OSError("Unknown I/O error")
++ if not data.startswith(stringEndArchive64):
++ raise BadZipFile("Zip64 end of central directory record not found")
++
+ sig, sz, create_version, read_version, disk_num, disk_dir, \
+ dircount, dircount2, dirsize, diroffset = \
+ struct.unpack(structEndArchive64, data)
+- if sig != stringEndArchive64:
+- return endrec
++ if (diroffset + dirsize != reloff or
++ sz + 12 != sizeEndCentDir64 + extrasz):
++ raise BadZipFile("Corrupt zip64 end of central directory record")
+
+ # Update the original endrec using data from the ZIP64 record
+ endrec[_ECD_SIGNATURE] = sig
+@@ -289,6 +305,7 @@ def _EndRecData64(fpin, offset, endrec):
+ endrec[_ECD_ENTRIES_TOTAL] = dircount2
+ endrec[_ECD_SIZE] = dirsize
+ endrec[_ECD_OFFSET] = diroffset
++ endrec[_ECD_LOCATION] = offset - extrasz
+ return endrec
+
+
+@@ -322,7 +339,7 @@ def _EndRecData(fpin):
+ endrec.append(filesize - sizeEndCentDir)
+
+ # Try to read the "Zip64 end of central directory" structure
+- return _EndRecData64(fpin, -sizeEndCentDir, endrec)
++ return _EndRecData64(fpin, filesize - sizeEndCentDir, endrec)
+
+ # Either this is not a ZIP file, or it is a ZIP file with an archive
+ # comment. Search the end of the file for the "end of central directory"
+@@ -346,8 +363,7 @@ def _EndRecData(fpin):
+ endrec.append(maxCommentStart + start)
+
+ # Try to read the "Zip64 end of central directory" structure
+- return _EndRecData64(fpin, maxCommentStart + start - filesize,
+- endrec)
++ return _EndRecData64(fpin, maxCommentStart + start, endrec)
+
+ # Unable to find a valid end of central directory structure
+ return None
+@@ -1458,9 +1474,6 @@ class ZipFile:
+
+ # "concat" is zero, unless zip was concatenated to another file
+ concat = endrec[_ECD_LOCATION] - size_cd - offset_cd
+- if endrec[_ECD_SIGNATURE] == stringEndArchive64:
+- # If Zip64 extension structures are present, account for them
+- concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator)
+
+ if self.debug > 2:
+ inferred = concat + offset_cd
+@@ -2082,7 +2095,7 @@ class ZipFile:
+ " would require ZIP64 extensions")
+ zip64endrec = struct.pack(
+ structEndArchive64, stringEndArchive64,
+- 44, 45, 45, 0, 0, centDirCount, centDirCount,
++ sizeEndCentDir64 - 12, 45, 45, 0, 0, centDirCount, centDirCount,
+ centDirSize, centDirOffset)
+ self.fp.write(zip64endrec)
+